From 7074062dec4bff8d523b4335900def51d2816440 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 18 Mar 2024 11:43:24 -0700 Subject: [PATCH 001/214] Fix reading BytesRef in enrich (#106431) We should use the returned value when reading BytesRef from a BytesRefBlock. However, we didn't follow this pattern when reading IP values in enrich. Relates #106186 Fixes #106430 --- .../elasticsearch/xpack/esql/enrich/QueryList.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java index 3039196a75f24..29371d81304f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/QueryList.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; -import java.net.InetAddress; import java.util.ArrayList; import java.util.List; import java.util.function.IntFunction; @@ -129,14 +128,13 @@ private IntFunction blockToJavaObject() { BytesRefBlock bytesRefBlock = (BytesRefBlock) block; if (inputDataType == IP) { yield offset -> { - bytesRefBlock.getBytesRef(offset, scratch); - if (ipBytes.length != scratch.length) { + final var bytes = bytesRefBlock.getBytesRef(offset, scratch); + if (ipBytes.length != bytes.length) { // Lucene only support 16-byte IP addresses, even IPv4 is encoded in 16 bytes - throw new IllegalStateException("Cannot decode IP field from bytes of length " + scratch.length); + throw new IllegalStateException("Cannot decode IP field from bytes of length " + bytes.length); } - System.arraycopy(scratch.bytes, scratch.offset, ipBytes, 0, scratch.length); - InetAddress ip = InetAddressPoint.decode(ipBytes); - return ip; + System.arraycopy(bytes.bytes, bytes.offset, ipBytes, 0, bytes.length); 
+ return InetAddressPoint.decode(ipBytes); }; } yield offset -> bytesRefBlock.getBytesRef(offset, new BytesRef()); From 157ce539aaf1ada26cd81dcc472e6d904ac41ba4 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:02:54 -0600 Subject: [PATCH 002/214] (DOC+) Version API page for ES API Base URL (#105845) * (DOC+) Version API page for ES API Base URL Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- docs/reference/rest-api/index.asciidoc | 2 + docs/reference/rest-api/root.asciidoc | 117 +++++++++++++++++++++++++ 2 files changed, 119 insertions(+) create mode 100644 docs/reference/rest-api/root.asciidoc diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 6395c8800bb39..fa0d3babb3a0c 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -44,6 +44,7 @@ not be included yet. * <> * <> * <> +* <> * <> * <> * <> @@ -93,6 +94,7 @@ include::{es-repo-dir}/query-rules/apis/index.asciidoc[] include::{es-repo-dir}/indices/apis/reload-analyzers.asciidoc[] include::{es-repo-dir}/repositories-metering-api/repositories-metering-apis.asciidoc[] include::{es-repo-dir}/rollup/rollup-apis.asciidoc[] +include::{es-repo-dir}/rest-api/root.asciidoc[] include::{es-repo-dir}/scripting/apis/script-apis.asciidoc[] include::{es-repo-dir}/search.asciidoc[] include::{es-repo-dir}/search-application/apis/index.asciidoc[] diff --git a/docs/reference/rest-api/root.asciidoc b/docs/reference/rest-api/root.asciidoc new file mode 100644 index 0000000000000..8821981c2afe3 --- /dev/null +++ b/docs/reference/rest-api/root.asciidoc @@ -0,0 +1,117 @@ +[[rest-api-root]] +== Root API +++++ +Root API +++++ + +The Elasticsearch API's base url returns its basic build, +version, and cluster information. 
+ +[source,console] +-------------------------------------------------- +GET / +-------------------------------------------------- + +[discrete] +[[rest-api-root-prereq]] +=== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the +`monitor`, `manage`, or `all` +<> to use this API. + +[role="child_attributes"] +[discrete] +[[rest-api-root-response-body]] +=== {api-response-body-title} + + +`name` :: +The responding <>. + +`cluster_name` :: +The responding <>. + +`cluster_uuid` :: +The responding Cluster's `uuid` as confirmed by +<>. + +`version` :: +(object) +Contains information about the running version of Elasticsearch. ++ properties of `version` +[%collapsible%open] +==== +`number` :: +Version number of responding +https://www.elastic.co/downloads/past-releases#elasticsearch[Elasticsearch release]. + +`build_flavor` :: +Build flavor, e.g. `default`. + +`build_type` :: +Build type corresponding to how +<, +e.g. `docker`, `rpm`, `tar`. + +`build_hash` :: +Elasticsearch's Git commit's SHA hash. + +`build_date` :: +Elasticsearch's Git commit's date. + +`build_snapshot` :: +If Elasticsearch's build was from a snapshot. + +`lucene_version` :: +Version number of Elasticsearch's +<>. + +`minimum_wire_compatibility_version` :: +Minimum node version with which the responding node can +communicate. Also minimum version from which you can perform +a <>. + +`minimum_index_compatibility_version` :: +Minimum index version with which the responding node can read +from disk. 
+==== + +[discrete] +[[rest-api-root-response-example]] +=== {api-examples-title} + +The API returns the following response: + +[source,console-result] +---- +{ + "name": "instance-0000000000", + "cluster_name": "my_test_cluster", + "cluster_uuid": "5QaxoN0pRZuOmWSxstBBwQ", + "version": { + "build_date": "2024-02-01T13:07:13.727175297Z", + "minimum_wire_compatibility_version": "7.17.0", + "build_hash": "6185ba65d27469afabc9bc951cded6c17c21e3f3", + "number": "8.12.1", + "lucene_version": "9.9.2", + "minimum_index_compatibility_version": "7.0.0", + "build_flavor": "default", + "build_snapshot": false, + "build_type": "docker" + }, + "tagline": "You Know, for Search" +} +---- +// TESTRESPONSE[s/"name": "instance-0000000000"/"name": "$body.name"/] +// TESTRESPONSE[s/"cluster_name": "my_test_cluster"/"cluster_name": "$body.cluster_name"/] +// TESTRESPONSE[s/"cluster_uuid": "5QaxoN0pRZuOmWSxstBBwQ"/"cluster_uuid": "$body.cluster_uuid"/] +// TESTRESPONSE[s/"build_date": "2024-02-01T13:07:13.727175297Z"/"build_date": "$body.version.build_date"/] +// TESTRESPONSE[s/"minimum_wire_compatibility_version": "7.17.0"/"minimum_wire_compatibility_version": "$body.version.minimum_wire_compatibility_version"/] +// TESTRESPONSE[s/"build_hash": "6185ba65d27469afabc9bc951cded6c17c21e3f3"/"build_hash": "$body.version.build_hash"/] +// TESTRESPONSE[s/"number": "8.12.1"/"number": "$body.version.number"/] +// TESTRESPONSE[s/"lucene_version": "9.9.2"/"lucene_version": "$body.version.lucene_version"/] +// TESTRESPONSE[s/"minimum_index_compatibility_version": "7.0.0"/"minimum_index_compatibility_version": "$body.version.minimum_index_compatibility_version"/] +// TESTRESPONSE[s/"build_flavor": "default"/"build_flavor": "$body.version.build_flavor"/] +// TESTRESPONSE[s/"build_snapshot": false/"build_snapshot": "$body.version.build_snapshot"/] +// TESTRESPONSE[s/"build_type": "docker"/"build_type": "$body.version.build_type"/] From 794baa4374392bc9085d78effdea76b443f85173 Mon Sep 17 00:00:00 2001 
From: Craig Taverner Date: Mon, 18 Mar 2024 22:16:56 +0100 Subject: [PATCH 003/214] Make ENRICH-MV test less flaky (#106434) * Make MV test less flaky Fixes #106433 * Fix result ordering --- .../resources/enrich-IT_tests_only.csv-spec | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index c97f49469fa24..1908a738c62ae 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -155,17 +155,19 @@ a:keyword | a_lang:keyword enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env -| KEEP client_ip, env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip ; -client_ip:ip | env:keyword -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.3.15 | [Development, Production] -172.21.0.5 | Development -172.21.2.113 | [Development, QA] -172.21.2.162 | [Development, QA] +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production ; enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] From 552c2ca16f112a4200aeea05cf1ff74c6bc4243f Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 18 Mar 2024 16:15:17 -0700 Subject: [PATCH 004/214] Mute testCancelRequestWhenFailingFetchingPages (#106445) Tracked at #106443 --- .../org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java | 1 + 1 file changed, 1 insertion(+) diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 3728eb624aaa0..ffbf32b7d10e7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -330,6 +330,7 @@ private void assertCancelled(ActionFuture response) throws Ex * Ensure that when some exchange requests fail, we cancel the ESQL request, and complete all * exchange sinks with the failure, despite having outstanding pages in the buffer. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106443") public void testCancelRequestWhenFailingFetchingPages() throws Exception { String coordinator = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); String dataNode = internalCluster().startDataOnlyNode(); From 1defa2cc3b3a98122ee842dbc2a6a011f372f562 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 18 Mar 2024 16:16:10 -0700 Subject: [PATCH 005/214] Mute testEmpty (#106446) Tracked at #106444 --- .../java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 9aeeb10da2ad9..c3020c510fc24 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -22,6 +22,7 @@ protected EsqlQueryResponse run(EsqlQueryRequest request) { return super.run(request); } + @AwaitsFix(bugUrl = 
"https://github.com/elastic/elasticsearch/issues/106444") public void testEmpty() { Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("pod")).build(); client().admin() From 426c493fae606818cbed6707a5ce4e61c0aa2783 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 19 Mar 2024 18:16:11 +1100 Subject: [PATCH 006/214] Use exceptions for OperatorOnlyRegistry.checkRest (#106339) This commit changs `OperatorOnlyRegistry.checkRest` to handle failures via an exception rather than a return value and the use of a channel This fits better into the way that the `SecurityRestFilter` works (since #104291) with a dedicated `RestInterceptor` interface --- .../operator/DefaultOperatorOnlyRegistry.java | 5 ++- .../operator/OperatorOnlyRegistry.java | 25 +++++++++------ .../security/operator/OperatorPrivileges.java | 31 +++++++++++++------ .../xpack/security/SecurityTests.java | 3 +- .../DefaultOperatorPrivilegesTests.java | 13 ++++++-- 5 files changed, 51 insertions(+), 26 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java index 6cc17e418314a..39813a2af5dfd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.license.PutLicenseAction; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportRequest; @@ -79,8 +78,8 @@ public OperatorPrivilegesViolation check(String action, 
TransportRequest request } @Override - public OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel) { - return null; // no restrictions + public void checkRest(RestHandler restHandler, RestRequest restRequest) { + // no restrictions } private OperatorPrivilegesViolation checkClusterUpdateSettings(ClusterUpdateSettingsRequest request) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java index c72c72e144b97..f0889f1c48c75 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorOnlyRegistry.java @@ -7,7 +7,8 @@ package org.elasticsearch.xpack.security.operator; -import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.transport.TransportRequest; @@ -22,18 +23,22 @@ public interface OperatorOnlyRegistry { OperatorPrivilegesViolation check(String action, TransportRequest request); /** - * Checks to see if a given {@link RestHandler} is subject to operator-only restrictions for the REST API. Any REST API may be - * fully or partially restricted. A fully restricted REST API mandates that the implementation call restChannel.sendResponse(...) and - * return a {@link OperatorPrivilegesViolation}. A partially restricted REST API mandates that the {@link RestRequest} is marked as - * restricted so that the downstream handler can behave appropriately. 
For example, to restrict the REST response the implementation + * Checks to see if a given {@link RestHandler} is subject to operator-only restrictions for the REST API. + * + * Any REST API may be fully or partially restricted. + * A fully restricted REST API mandates that the implementation of this method throw an + * {@link org.elasticsearch.ElasticsearchStatusException} with an appropriate status code and error message. + * + * A partially restricted REST API mandates that the {@link RestRequest} is marked as restricted so that the downstream handler can + * behave appropriately. + * For example, to restrict the REST response the implementation * should call {@link RestRequest#markPathRestricted(String)} so that the downstream handler can properly restrict the response - * before returning to the client. Note - a partial restriction should return null. + * before returning to the client. Note - a partial restriction should not throw an exception. + * * @param restHandler The {@link RestHandler} to check for any restrictions * @param restRequest The {@link RestRequest} to check for any restrictions and mark any partially restricted REST API's - * @param restChannel The {@link RestChannel} to enforce fully restricted REST API's - * @return {@link OperatorPrivilegesViolation} iff the request was fully restricted and the response has been sent back to the client. - * else returns null. 
+ * @throws ElasticsearchStatusException if the request should be denied in its entirety (fully restricted) */ - OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel); + void checkRest(RestHandler restHandler, RestRequest restRequest) throws ElasticsearchException; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java index eef7cbdfc7f5e..79c529eb3d7b1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/OperatorPrivileges.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.common.settings.Setting; @@ -156,28 +157,40 @@ public boolean checkRest(RestHandler restHandler, RestRequest restRequest, RestC if (false == isOperator(threadContext)) { // Only check whether request is operator-only when user is NOT an operator if (logger.isTraceEnabled()) { - Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); - final User user = authentication.getEffectiveSubject().getUser(); + final User user = getUser(threadContext); logger.trace("Checking for any operator-only REST violations for user [{}] and uri [{}]", user, restRequest.uri()); } - OperatorPrivilegesViolation violation = operatorOnlyRegistry.checkRest(restHandler, restRequest, restChannel); - if (violation != null) { + + try { + operatorOnlyRegistry.checkRest(restHandler, restRequest); + } catch (ElasticsearchException e) { if 
(logger.isDebugEnabled()) { - Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); - final User user = authentication.getEffectiveSubject().getUser(); logger.debug( "Found the following operator-only violation [{}] for user [{}] and uri [{}]", - violation.message(), - user, + e.getMessage(), + getUser(threadContext), restRequest.uri() ); } - return false; + throw e; + } catch (Exception e) { + logger.info( + "Unexpected exception [{}] while processing operator privileges for user [{}] and uri [{}]", + e.getMessage(), + getUser(threadContext), + restRequest.uri() + ); + throw e; } } return true; } + private static User getUser(ThreadContext threadContext) { + Authentication authentication = threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY); + return authentication.getEffectiveSubject().getUser(); + } + public void maybeInterceptRequest(ThreadContext threadContext, TransportRequest request) { if (request instanceof RestoreSnapshotRequest) { logger.debug("Intercepting [{}] for operator privileges", request); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 66b03e8dedd32..6a869377d7b07 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -54,7 +54,6 @@ import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.internal.RestExtension; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.ScriptService; @@ -177,7 +176,7 @@ public OperatorPrivilegesViolation check(String action, TransportRequest request } @Override - public 
OperatorPrivilegesViolation checkRest(RestHandler restHandler, RestRequest restRequest, RestChannel restChannel) { + public void checkRest(RestHandler restHandler, RestRequest restRequest) { throw new RuntimeException("boom"); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java index 22c63a4e27378..8a7602627b714 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/operator/DefaultOperatorPrivilegesTests.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.common.logging.Loggers; @@ -32,13 +33,16 @@ import org.junit.Before; import org.mockito.Mockito; +import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.elasticsearch.xpack.security.operator.OperatorPrivileges.NOOP_OPERATOR_PRIVILEGES_SERVICE; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; @@ -278,8 +282,13 @@ public void testCheckRest() { ThreadContext threadContext = new 
ThreadContext(settings); // not an operator - when(operatorOnlyRegistry.checkRest(restHandler, restRequest, restChannel)).thenReturn(() -> "violation!"); - assertFalse(operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext)); + doThrow(new ElasticsearchSecurityException("violation!")).when(operatorOnlyRegistry).checkRest(restHandler, restRequest); + final ElasticsearchException ex = expectThrows( + ElasticsearchException.class, + () -> operatorPrivilegesService.checkRest(restHandler, restRequest, restChannel, threadContext) + ); + assertThat(ex, instanceOf(ElasticsearchSecurityException.class)); + assertThat(ex, throwableWithMessage("violation!")); Mockito.clearInvocations(operatorOnlyRegistry); // is an operator From 5d7942dc6d415fb3be181c3be01eb9e5d962a822 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Tue, 19 Mar 2024 09:10:16 +0100 Subject: [PATCH 007/214] Revert "Revert "Exclude internal fields from job APIs. (#106115)" (#106277)" (#106382) This reverts commit 426b6106567aa3b548a7f34737134654e2296c54. 
--- x-pack/plugin/build.gradle | 2 + .../core/ml/action/PreviewDatafeedAction.java | 2 +- .../xpack/core/ml/action/PutJobAction.java | 11 +-- .../ml/action/ValidateJobConfigAction.java | 14 +--- .../xpack/core/ml/job/config/Job.java | 67 +++++++------------ .../xpack/core/ml/job/messages/Messages.java | 2 - .../xpack/core/ml/job/config/JobTests.java | 25 ++----- .../rest-api-spec/test/ml/jobs_crud.yml | 2 +- .../rest-api-spec/test/ml/validate.yml | 16 +---- 9 files changed, 38 insertions(+), 103 deletions(-) diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index eae3031512d4f..72e63b3255999 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -109,6 +109,8 @@ tasks.named("yamlRestTestV7CompatTransform").configure { task -> ) task.skipTest("ml/jobs_crud/Test update job", "Behaviour change #89824 - added limit filter to categorization analyzer") task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") + task.skipTest("ml/jobs_crud/Test cannot create job with model snapshot id set", "Exception type has changed.") + task.skipTest("ml/validate/Test job config is invalid because model snapshot id set", "Exception type has changed.") task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") task.skipTest( "ml/datafeeds_crud/Test update datafeed to point to different job", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index 8d4e9d25b94a3..d03a6d5c0c7c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -60,7 +60,7 @@ public static class Request extends 
ActionRequest implements ToXContentObject { private static final ObjectParser PARSER = new ObjectParser<>("preview_datafeed_action", Request.Builder::new); static { PARSER.declareObject(Builder::setDatafeedBuilder, DatafeedConfig.STRICT_PARSER, DATAFEED_CONFIG); - PARSER.declareObject(Builder::setJobBuilder, Job.STRICT_PARSER, JOB_CONFIG); + PARSER.declareObject(Builder::setJobBuilder, Job.REST_REQUEST_PARSER, JOB_CONFIG); PARSER.declareString(Builder::setStart, START_TIME); PARSER.declareString(Builder::setEnd, END_TIME); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 400bdaa3a27ea..efb4dacd83ba4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.ml.job.messages.Messages; import java.io.IOException; -import java.util.List; import java.util.Objects; public class PutJobAction extends ActionType { @@ -35,7 +34,7 @@ private PutJobAction() { public static class Request extends AcknowledgedRequest { public static Request parseRequest(String jobId, XContentParser parser, IndicesOptions indicesOptions) { - Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.REST_REQUEST_PARSER.apply(parser, null); if (jobBuilder.getId() == null) { jobBuilder.setId(jobId); } else if (Strings.isNullOrEmpty(jobId) == false && jobId.equals(jobBuilder.getId()) == false) { @@ -58,14 +57,6 @@ public Request(Job.Builder jobBuilder) { // would occur when parsing an old job config that already had duplicate detectors. 
jobBuilder.validateDetectorsAreUnique(); - // Some fields cannot be set at create time - List invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings(); - if (invalidJobCreationSettings.isEmpty() == false) { - throw new IllegalArgumentException( - Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS, String.join(",", invalidJobCreationSettings)) - ); - } - this.jobBuilder = jobBuilder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java index 48549ae100e36..76cba60667c32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java @@ -14,11 +14,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.core.ml.job.messages.Messages; import java.io.IOException; import java.util.Date; -import java.util.List; import java.util.Objects; public class ValidateJobConfigAction extends ActionType { @@ -32,10 +30,10 @@ protected ValidateJobConfigAction() { public static class Request extends ActionRequest { - private Job job; + private final Job job; public static Request parseRequest(XContentParser parser) { - Job.Builder jobBuilder = Job.STRICT_PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.REST_REQUEST_PARSER.apply(parser, null); // When jobs are PUT their ID must be supplied in the URL - assume this will // be valid unless an invalid job ID is specified in the JSON to be validated jobBuilder.setId(jobBuilder.getId() != null ? 
jobBuilder.getId() : "ok"); @@ -45,14 +43,6 @@ public static Request parseRequest(XContentParser parser) { // would occur when parsing an old job config that already had duplicate detectors. jobBuilder.validateDetectorsAreUnique(); - // Some fields cannot be set at create time - List invalidJobCreationSettings = jobBuilder.invalidCreateTimeSettings(); - if (invalidJobCreationSettings.isEmpty() == false) { - throw new IllegalArgumentException( - Messages.getMessage(Messages.JOB_CONFIG_INVALID_CREATE_SETTINGS, String.join(",", invalidJobCreationSettings)) - ); - } - return new Request(jobBuilder.build(new Date())); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index fbb1a137bdc13..8da0209e10293 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -98,8 +98,9 @@ public class Job implements SimpleDiffable, Writeable, ToXContentObject { public static final ParseField RESULTS_FIELD = new ParseField("jobs"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ObjectParser LENIENT_PARSER = createParser(true); - public static final ObjectParser STRICT_PARSER = createParser(false); + public static final ObjectParser LENIENT_PARSER = createParser(true, true); + // Use the REST request parser to parse a job passed to the API, to disallow setting internal fields. 
+ public static final ObjectParser REST_REQUEST_PARSER = createParser(false, false); public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1); @@ -114,26 +115,12 @@ public class Job implements SimpleDiffable, Writeable, ToXContentObject { public static final long DEFAULT_MODEL_SNAPSHOT_RETENTION_DAYS = 10; public static final long DEFAULT_DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = 1; - private static ObjectParser createParser(boolean ignoreUnknownFields) { + private static ObjectParser createParser(boolean allowInternalFields, boolean ignoreUnknownFields) { ObjectParser parser = new ObjectParser<>("job_details", ignoreUnknownFields, Builder::new); parser.declareString(Builder::setId, ID); - parser.declareString(Builder::setJobType, JOB_TYPE); - parser.declareString(Builder::setJobVersion, JOB_VERSION); parser.declareStringArray(Builder::setGroups, GROUPS); parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); - parser.declareField( - Builder::setCreateTime, - p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - parser.declareField( - Builder::setFinishedTime, - p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()), - FINISHED_TIME, - ValueType.VALUE - ); parser.declareObject( Builder::setAnalysisConfig, ignoreUnknownFields ? 
AnalysisConfig.LENIENT_PARSER : AnalysisConfig.STRICT_PARSER, @@ -165,17 +152,35 @@ private static ObjectParser createParser(boolean ignoreUnknownFie parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); parser.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); parser.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT); - parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - parser.declareBoolean(Builder::setDeleting, DELETING); parser.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN); - parser.declareObject(Builder::setBlocked, ignoreUnknownFields ? Blocked.LENIENT_PARSER : Blocked.STRICT_PARSER, BLOCKED); parser.declareObject( Builder::setDatafeed, ignoreUnknownFields ? DatafeedConfig.LENIENT_PARSER : DatafeedConfig.STRICT_PARSER, DATAFEED_CONFIG ); + + if (allowInternalFields) { + parser.declareString(Builder::setJobType, JOB_TYPE); + parser.declareString(Builder::setJobVersion, JOB_VERSION); + parser.declareField( + Builder::setCreateTime, + p -> TimeUtils.parseTimeField(p, CREATE_TIME.getPreferredName()), + CREATE_TIME, + ValueType.VALUE + ); + parser.declareField( + Builder::setFinishedTime, + p -> TimeUtils.parseTimeField(p, FINISHED_TIME.getPreferredName()), + FINISHED_TIME, + ValueType.VALUE + ); + parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); + parser.declareStringOrNull(Builder::setModelSnapshotMinVersion, MODEL_SNAPSHOT_MIN_VERSION); + parser.declareBoolean(Builder::setDeleting, DELETING); + parser.declareObject(Builder::setBlocked, ignoreUnknownFields ? 
Blocked.LENIENT_PARSER : Blocked.STRICT_PARSER, BLOCKED); + } + return parser; } @@ -1020,26 +1025,6 @@ public Builder setDatafeedIndicesOptionsIfRequired(IndicesOptions indicesOptions return this; } - /** - * Return the list of fields that have been set and are invalid to - * be set when the job is created e.g. model snapshot Id should not - * be set at job creation. - * @return List of fields set fields that should not be. - */ - public List invalidCreateTimeSettings() { - List invalidCreateValues = new ArrayList<>(); - if (modelSnapshotId != null) { - invalidCreateValues.add(MODEL_SNAPSHOT_ID.getPreferredName()); - } - if (finishedTime != null) { - invalidCreateValues.add(FINISHED_TIME.getPreferredName()); - } - if (createTime != null) { - invalidCreateValues.add(CREATE_TIME.getPreferredName()); - } - return invalidCreateValues; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index ad7a6b998fafd..52c97ece1b017 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -222,8 +222,6 @@ public final class Messages { public static final String JOB_CONFIG_FUNCTION_REQUIRES_OVERFIELD = "over_field_name must be set when the ''{0}'' function is used"; public static final String JOB_CONFIG_ID_ALREADY_TAKEN = "The job cannot be created with the Id ''{0}''. 
The Id is already used."; public static final String JOB_CONFIG_ID_TOO_LONG = "The job id cannot contain more than {0,number,integer} characters."; - public static final String JOB_CONFIG_INVALID_CREATE_SETTINGS = - "The job is configured with fields [{0}] that are illegal to set at job creation"; public static final String JOB_CONFIG_INVALID_FIELDNAME_CHARS = "Invalid field name ''{0}''. Field names including over, by and partition " + "fields cannot contain any of these characters: {1}"; public static final String JOB_CONFIG_INVALID_FIELDNAME = diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 4fff2804f9350..047f3a418c36b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -43,7 +43,6 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -101,7 +100,7 @@ protected Writeable.Reader instanceReader() { @Override protected Job doParseInstance(XContentParser parser) { - return Job.STRICT_PARSER.apply(parser, null).build(); + return Job.LENIENT_PARSER.apply(parser, null).build(); } public void testToXContentForInternalStorage() throws IOException { @@ -119,10 +118,10 @@ public void testToXContentForInternalStorage() throws IOException { } } - public void testFutureConfigParse() throws IOException { + public void testRestRequestParser_DoesntAllowInternalFields() throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, FUTURE_JOB); - XContentParseException e = expectThrows(XContentParseException.class, () -> Job.STRICT_PARSER.apply(parser, null).build()); - 
assertEquals("[4:5] [job_details] unknown field [tomorrows_technology_today]", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> Job.REST_REQUEST_PARSER.apply(parser, null).build()); + assertEquals("[3:5] [job_details] unknown field [create_time]", e.getMessage()); } public void testFutureMetadataParse() throws IOException { @@ -554,22 +553,6 @@ public void testBuilder_givenTimeFieldInAnalysisConfig() { assertThat(e.getMessage(), equalTo(Messages.getMessage(Messages.JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG))); } - public void testInvalidCreateTimeSettings() { - Job.Builder builder = new Job.Builder("invalid-settings"); - builder.setModelSnapshotId("snapshot-foo"); - assertEquals(Collections.singletonList(Job.MODEL_SNAPSHOT_ID.getPreferredName()), builder.invalidCreateTimeSettings()); - - builder.setCreateTime(new Date()); - builder.setFinishedTime(new Date()); - - Set expected = new HashSet<>(); - expected.add(Job.CREATE_TIME.getPreferredName()); - expected.add(Job.FINISHED_TIME.getPreferredName()); - expected.add(Job.MODEL_SNAPSHOT_ID.getPreferredName()); - - assertEquals(expected, new HashSet<>(builder.invalidCreateTimeSettings())); - } - public void testEmptyGroup() { Job.Builder builder = buildJobBuilder("foo"); builder.setGroups(Arrays.asList("foo-group", "")); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml index 3c4439444d1a1..24e869781f677 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -1130,7 +1130,7 @@ "Test cannot create job with model snapshot id set": - do: - catch: /illegal_argument_exception/ + catch: /x_content_parse_exception/ ml.put_job: job_id: has-model-snapshot-id body: > diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml index 1df34a64f860a..a2cfb65b08a11 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml @@ -76,21 +76,7 @@ "Test job config is invalid because model snapshot id set": - do: - catch: /illegal_argument_exception/ - ml.validate: - body: > - { - "model_snapshot_id": "wont-create-with-this-setting", - "analysis_config" : { - "bucket_span": "1h", - "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}] - }, - "data_description" : { - } - } - - - do: - catch: /The job is configured with fields \[model_snapshot_id\] that are illegal to set at job creation/ + catch: /x_content_parse_exception/ ml.validate: body: > { From e4bf51d59c7ab1a8a9cd576cadd6c1683ce8cbb0 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 19 Mar 2024 09:10:46 +0000 Subject: [PATCH 008/214] Refactor how roundup parsers are created (#106353) --- .../common/time/DateFormatters.java | 4 +- .../common/time/DateTimeParser.java | 5 - .../elasticsearch/common/time/EpochTime.java | 4 +- .../common/time/JavaDateFormatter.java | 113 ++++++++---------- .../common/time/JavaTimeDateTimeParser.java | 25 ++-- 5 files changed, 70 insertions(+), 81 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 80811fbbd1b80..7dae11fb8d720 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -1538,13 +1538,13 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p */ private static final DateFormatter DATE = newDateFormatter( "date", - 
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.STRICT), + DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT), DATE_FORMATTER ); // only the formatter, nothing optional here private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder().append( - DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT) + DateTimeFormatter.ISO_LOCAL_DATE.withLocale(Locale.ROOT).withResolverStyle(ResolverStyle.LENIENT) ) .appendLiteral('T') .appendPattern("HH:mm") diff --git a/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java index 7c37f4a3c569e..a40fee58ceeb2 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateTimeParser.java @@ -9,7 +9,6 @@ package org.elasticsearch.common.time; import java.time.ZoneId; -import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; import java.util.Locale; @@ -23,14 +22,10 @@ interface DateTimeParser { Locale getLocale(); - String getFormatString(); - DateTimeParser withZone(ZoneId zone); DateTimeParser withLocale(Locale locale); - void applyToBuilder(DateTimeFormatterBuilder builder); - /** * Parses the specified string. *

diff --git a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java index 2c8ef4e48411f..6b80aa9abf6aa 100644 --- a/server/src/main/java/org/elasticsearch/common/time/EpochTime.java +++ b/server/src/main/java/org/elasticsearch/common/time/EpochTime.java @@ -252,7 +252,7 @@ public long getFrom(TemporalAccessor temporal) { static final DateFormatter SECONDS_FORMATTER = new JavaDateFormatter( "epoch_second", new JavaTimeDateTimePrinter(SECONDS_FORMATTER1), - (builder, parser) -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L), + JavaTimeDateTimeParser.createRoundUpParserGenerator(builder -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L)), new JavaTimeDateTimeParser(SECONDS_FORMATTER1), new JavaTimeDateTimeParser(SECONDS_FORMATTER2) ); @@ -260,7 +260,7 @@ public long getFrom(TemporalAccessor temporal) { static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter( "epoch_millis", new JavaTimeDateTimePrinter(MILLISECONDS_FORMATTER1), - (builder, parser) -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L), + JavaTimeDateTimeParser.createRoundUpParserGenerator(builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L)), new JavaTimeDateTimeParser(MILLISECONDS_FORMATTER1), new JavaTimeDateTimeParser(MILLISECONDS_FORMATTER2) ); diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index c3a25cb4e15b5..9c39ee51276d7 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -17,29 +17,40 @@ import java.time.temporal.IsoFields; import java.time.temporal.TemporalAccessor; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import 
java.util.Objects; -import java.util.function.BiConsumer; import java.util.function.UnaryOperator; class JavaDateFormatter implements DateFormatter { + @SuppressWarnings("unchecked") + private static T defaultRoundUp(T parser) { + if (parser instanceof JavaTimeDateTimeParser jtp) { + return (T) defaultRoundUp(jtp); + } + throw new IllegalArgumentException("Unknown parser implementation " + parser.getClass()); + } + /** - * A default consumer that allows to round up fields (used for range searches, optional fields missing) - * it relies on toString implementation of DateTimeFormatter and ChronoField. - * For instance for pattern - * the parser would have a toString() - * - * Value(MonthOfYear,2)'/'Value(DayOfMonth,2)'/'Value(YearOfEra,4,19,EXCEEDS_PAD)' - * 'Value(ClockHourOfAmPm,2)':'Value(MinuteOfHour,2)' 'Text(AmPmOfDay,SHORT) - * - * and ChronoField.CLOCK_HOUR_OF_AMPM would have toString() ClockHourOfAmPm - * this allows the rounding logic to default CLOCK_HOUR_OF_AMPM field instead of HOUR_OF_DAY - * without this logic, the rounding would result in a conflict as HOUR_OF_DAY would be missing, but CLOCK_HOUR_OF_AMPM would be provided - */ - private static final BiConsumer DEFAULT_ROUND_UP = (builder, parser) -> { - String parserAsString = parser.getFormatString(); + * A default transform that allows to round up fields (used for range searches, optional fields missing) + * it relies on toString implementation of DateTimeFormatter and ChronoField. 
+ * For instance for pattern + * the parser would have a toString() + * + * Value(MonthOfYear,2)'/'Value(DayOfMonth,2)'/'Value(YearOfEra,4,19,EXCEEDS_PAD)' + * 'Value(ClockHourOfAmPm,2)':'Value(MinuteOfHour,2)' 'Text(AmPmOfDay,SHORT) + * + * and ChronoField.CLOCK_HOUR_OF_AMPM would have toString() ClockHourOfAmPm + * this allows the rounding logic to default CLOCK_HOUR_OF_AMPM field instead of HOUR_OF_DAY + * without this logic, the rounding would result in a conflict as HOUR_OF_DAY would be missing, but CLOCK_HOUR_OF_AMPM would be provided + */ + private static JavaTimeDateTimeParser defaultRoundUp(JavaTimeDateTimeParser parser) { + DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); + builder.append(parser.formatter()); + + String parserAsString = parser.formatter().toString(); if (parserAsString.contains(ChronoField.DAY_OF_YEAR.toString())) { builder.parseDefaulting(ChronoField.DAY_OF_YEAR, 1L); // TODO ideally we should make defaulting for weekbased year here too, @@ -63,7 +74,9 @@ class JavaDateFormatter implements DateFormatter { builder.parseDefaulting(ChronoField.MINUTE_OF_HOUR, 59L); builder.parseDefaulting(ChronoField.SECOND_OF_MINUTE, 59L); builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L); - }; + + return new JavaTimeDateTimeParser(builder.toFormatter(parser.getLocale())); + } private final String format; private final DateTimePrinter printer; @@ -72,38 +85,38 @@ class JavaDateFormatter implements DateFormatter { // named formatters use default roundUpParser JavaDateFormatter(String format, DateTimePrinter printer, DateTimeParser... 
parsers) { - this( - format, - printer, - // set up base fields which should be used for default parsing, when we round up for date math - DEFAULT_ROUND_UP, - parsers - ); + this(format, printer, JavaDateFormatter::defaultRoundUp, parsers); } - JavaDateFormatter( + @SafeVarargs + @SuppressWarnings("varargs") // parsers array is read-only, and not stored in any fields, so this is safe + JavaDateFormatter( String format, DateTimePrinter printer, - BiConsumer roundupParserConsumer, - DateTimeParser... parsers + UnaryOperator generateRoundUpParser, + T... parsers ) { + if (format.contains("||")) { + throw new IllegalArgumentException("This class cannot handle multiple format specifiers"); + } if (printer == null) { throw new IllegalArgumentException("printer may not be null"); } if (parsers.length == 0) { throw new IllegalArgumentException("parsers need to be specified"); } + verifyPrinterParsers(printer, parsers); + this.printer = printer; this.format = format; - this.parsers = parsersArray(parsers); - this.roundupParsers = createRoundUpParsers(format, roundupParserConsumer, locale(), this.parsers); + this.parsers = Arrays.copyOf(parsers, parsers.length, DateTimeParser[].class); + this.roundupParsers = mapParsers(generateRoundUpParser, parsers); } - private static DateTimeParser[] parsersArray(DateTimeParser[] parsers) { - final ZoneId zoneId = parsers[0].getZone(); - final Locale locale = parsers[0].getLocale(); - for (int i = 1; i < parsers.length; i++) { - final DateTimeParser parser = parsers[i]; + private static void verifyPrinterParsers(DateTimePrinter printer, DateTimeParser[] parsers) { + ZoneId zoneId = printer.getZone(); + Locale locale = printer.getLocale(); + for (DateTimeParser parser : parsers) { if (Objects.equals(parser.getZone(), zoneId) == false) { throw new IllegalArgumentException("formatters must have the same time zone"); } @@ -111,31 +124,6 @@ private static DateTimeParser[] parsersArray(DateTimeParser[] parsers) { throw new 
IllegalArgumentException("formatters must have the same locale"); } } - return parsers; - } - - /** - * This is when the RoundUp Formatters are created. In further merges (with ||) it will only append them to a list. - * || is not expected to be provided as format when a RoundUp formatter is created. It will be splitted before in - * DateFormatter.forPattern - * JavaDateFormatter created with a custom format like DateFormatter.forPattern("YYYY") will only have one parser - * It is however possible to have a JavaDateFormatter with multiple parsers. For instance see a "date_time" formatter in - * DateFormatters. - * This means that we need to also have multiple RoundUp parsers. - */ - private static DateTimeParser[] createRoundUpParsers( - String format, - BiConsumer roundupParserConsumer, - Locale locale, - DateTimeParser[] parsers - ) { - assert format.contains("||") == false; - return mapObjects(parser -> { - DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); - parser.applyToBuilder(builder); - roundupParserConsumer.accept(builder, parser); - return new JavaTimeDateTimeParser(builder.toFormatter(locale)); - }, parsers); } static DateFormatter combined(String input, List formatters) { @@ -232,13 +220,14 @@ private JavaDateFormatter mapParsers(UnaryOperator printerMappi return new JavaDateFormatter( format, printerMapping.apply(printer), - mapObjects(parserMapping, this.roundupParsers), - mapObjects(parserMapping, this.parsers) + mapParsers(parserMapping, this.roundupParsers), + mapParsers(parserMapping, this.parsers) ); } - private static T[] mapObjects(UnaryOperator mapping, T[] objects) { - T[] res = objects.clone(); + @SafeVarargs + private static DateTimeParser[] mapParsers(UnaryOperator mapping, T... 
objects) { + DateTimeParser[] res = new DateTimeParser[objects.length]; for (int i = 0; i < objects.length; i++) { res[i] = mapping.apply(objects[i]); } diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java index c473b81771a53..793b97b3fa472 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaTimeDateTimeParser.java @@ -15,15 +15,30 @@ import java.time.temporal.TemporalAccessor; import java.util.Locale; import java.util.Optional; +import java.util.function.Consumer; +import java.util.function.UnaryOperator; class JavaTimeDateTimeParser implements DateTimeParser { + static UnaryOperator createRoundUpParserGenerator(Consumer modifyBuilder) { + return p -> { + var builder = new DateTimeFormatterBuilder(); + builder.append(p.formatter); + modifyBuilder.accept(builder); + return new JavaTimeDateTimeParser(builder.toFormatter(p.getLocale())); + }; + } + private final DateTimeFormatter formatter; JavaTimeDateTimeParser(DateTimeFormatter formatter) { this.formatter = formatter; } + DateTimeFormatter formatter() { + return formatter; + } + @Override public ZoneId getZone() { return formatter.getZone(); @@ -34,11 +49,6 @@ public Locale getLocale() { return formatter.getLocale(); } - @Override - public String getFormatString() { - return formatter.toString(); - } - @Override public DateTimeParser withZone(ZoneId zone) { return new JavaTimeDateTimeParser(formatter.withZone(zone)); @@ -49,11 +59,6 @@ public DateTimeParser withLocale(Locale locale) { return new JavaTimeDateTimeParser(formatter.withLocale(locale)); } - @Override - public void applyToBuilder(DateTimeFormatterBuilder builder) { - builder.append(formatter); - } - @Override public TemporalAccessor parse(CharSequence str) { return formatter.parse(str); From 8272ae2290556fb34a48a4e7cff30c642c8694ec Mon 
Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 19 Mar 2024 10:31:49 +0100 Subject: [PATCH 009/214] Mute S3RepositoryThirdPartyTests.testReadFromPositionLargerThanBlobLength (#106458) Muted for investigation Relates #106457 --- .../repositories/s3/S3RepositoryThirdPartyTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index 1b4ab7de0c2ff..583d1477fbaa9 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -226,6 +226,7 @@ List listMultipartUploads() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106457") public void testReadFromPositionLargerThanBlobLength() { final var blobName = randomIdentifier(); final var blobBytes = randomBytesReference(randomIntBetween(100, 2_000)); From 3af0e93315a1bde110d88c3cc44a9f126755de7a Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 19 Mar 2024 10:33:28 +0100 Subject: [PATCH 010/214] Fix `DataStreamLifecycleTests.testEffectiveRetention` (#106456) --- .../cluster/metadata/DataStreamLifecycleTests.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java index e3bf5260a7445..b266addc37407 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java @@ -317,7 +317,12 @@ public void 
testEffectiveRetention() { TimeValue maxRetentionLessThanDataStream = TimeValue.timeValueDays(dataStreamRetention.days() - 1); effectiveDataRetentionWithSource = lifecycleRetention.getEffectiveDataRetentionWithSource( - new DataStreamGlobalRetention(randomBoolean() ? null : TimeValue.timeValueDays(10), maxRetentionLessThanDataStream) + new DataStreamGlobalRetention( + randomBoolean() + ? null + : TimeValue.timeValueDays(randomIntBetween(1, (int) (maxRetentionLessThanDataStream.days() - 1))), + maxRetentionLessThanDataStream + ) ); assertThat(effectiveDataRetentionWithSource.v1(), equalTo(maxRetentionLessThanDataStream)); assertThat(effectiveDataRetentionWithSource.v2(), equalTo(MAX_GLOBAL_RETENTION)); From de3e2e4ef2c53e307ac16d5302fd11750dc3f0b2 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 19 Mar 2024 11:26:08 +0100 Subject: [PATCH 011/214] Remove deprecated APM setting from RunTask (#105076) --- .../org/elasticsearch/gradle/testclusters/RunTask.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 9216b538bd313..29833e1f3bb07 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -213,10 +213,9 @@ public void beforeStart() { // if metrics were not enabled explicitly for gradlew run we should disable them else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { // metrics node.setting("telemetry.metrics.enabled", "false"); - } else if (node.getSettingKeys().contains("telemetry.tracing.enabled") == false - && node.getSettingKeys().contains("tracing.apm.enabled") == false) { // tracing - node.setting("telemetry.tracing.enable", "false"); - } + } else if (node.getSettingKeys().contains("telemetry.tracing.enabled") == false) { // 
tracing + node.setting("telemetry.tracing.enable", "false"); + } } } From 43b8ca00865c2846346f01d834075432e1362bb6 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 19 Mar 2024 10:26:23 +0000 Subject: [PATCH 012/214] Extract `SnapshotNamePredicate` (#106397) The logic for selecting snapshots by name in `TransportGetSnapshotsAction` is a little convoluted, not easy to test, and as implemented today requires us to create several large lists or sets of snapshot IDs. This commit extracts the logic, adds a dedicated test suite, and replaces some intermediate lists with simple filtering. --- .../snapshots/get/SnapshotNamePredicate.java | 123 ++++++++++++++++ .../get/TransportGetSnapshotsAction.java | 129 ++++------------- .../get/SnapshotNamePredicateTests.java | 137 ++++++++++++++++++ 3 files changed, 292 insertions(+), 97 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicateTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java new file mode 100644 index 0000000000000..44c75e95cbd6d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicate.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.get; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.repositories.ResolvedRepositories; +import org.elasticsearch.snapshots.SnapshotMissingException; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; + +/** + * Represents a filter on snapshots by name, including some special values such as {@code _all} and {@code _current}, as supported by + * {@link TransportGetSnapshotsAction}. + */ +public interface SnapshotNamePredicate { + /** + * @return Whether a snapshot with the given name should be selected. + */ + boolean test(String snapshotName, boolean isCurrentSnapshot); + + /** + * @return the snapshot names that must be present in a repository. If one of these snapshots is missing then this repository should + * yield a {@link SnapshotMissingException} rather than any snapshots. + */ + Collection requiredNames(); + + /** + * A {@link SnapshotNamePredicate} which matches all snapshots (and requires no specific names). + */ + SnapshotNamePredicate MATCH_ALL = new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return true; + } + + @Override + public Collection requiredNames() { + return Collections.emptyList(); + } + }; + + /** + * A {@link SnapshotNamePredicate} which matches all currently-executing snapshots (and requires no specific names). + */ + SnapshotNamePredicate MATCH_CURRENT_ONLY = new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return isCurrentSnapshot; + } + + @Override + public Collection requiredNames() { + return Collections.emptyList(); + } + }; + + /** + * @return a {@link SnapshotNamePredicate} from the given {@link GetSnapshotsRequest} parameters. 
+ */ + static SnapshotNamePredicate forSnapshots(boolean ignoreUnavailable, String[] snapshots) { + if (ResolvedRepositories.isMatchAll(snapshots)) { + return MATCH_ALL; + } + + if (snapshots.length == 1 && GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshots[0])) { + return MATCH_CURRENT_ONLY; + } + + final List includesBuilder = new ArrayList<>(snapshots.length); + final List excludesBuilder = new ArrayList<>(snapshots.length); + final Set requiredNamesBuilder = ignoreUnavailable ? null : Sets.newHashSetWithExpectedSize(snapshots.length); + boolean seenCurrent = false; + boolean seenWildcard = false; + for (final var snapshot : snapshots) { + if (seenWildcard && snapshot.length() > 1 && snapshot.startsWith("-")) { + excludesBuilder.add(snapshot.substring(1)); + } else { + if (Regex.isSimpleMatchPattern(snapshot)) { + seenWildcard = true; + includesBuilder.add(snapshot); + } else if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshot)) { + seenCurrent = true; + seenWildcard = true; + } else { + if (ignoreUnavailable == false) { + requiredNamesBuilder.add(snapshot); + } + includesBuilder.add(snapshot); + } + } + } + + final boolean includeCurrent = seenCurrent; + final String[] includes = includesBuilder.toArray(Strings.EMPTY_ARRAY); + final String[] excludes = excludesBuilder.toArray(Strings.EMPTY_ARRAY); + final Set requiredNames = requiredNamesBuilder == null ? 
Set.of() : Set.copyOf(requiredNamesBuilder); + + return new SnapshotNamePredicate() { + @Override + public boolean test(String snapshotName, boolean isCurrentSnapshot) { + return ((includeCurrent && isCurrentSnapshot) || Regex.simpleMatch(includes, snapshotName)) + && (Regex.simpleMatch(excludes, snapshotName) == false); + } + + @Override + public Collection requiredNames() { + return requiredNames; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 3563192be2eb4..45389f4aba2fa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -153,8 +153,7 @@ private class GetSnapshotsOperation { private final boolean isMultiRepoRequest; // snapshots selection - private final String[] snapshots; - private final boolean ignoreUnavailable; + private final SnapshotNamePredicate snapshotNamePredicate; private final SnapshotPredicates fromSortValuePredicates; private final Predicate slmPolicyPredicate; @@ -172,6 +171,7 @@ private class GetSnapshotsOperation { private final SnapshotsInProgress snapshotsInProgress; // output detail + private final boolean ignoreUnavailable; private final boolean verbose; private final boolean indices; @@ -204,7 +204,6 @@ private class GetSnapshotsOperation { this.cancellableTask = cancellableTask; this.repositories = resolvedRepositories.repositoryMetadata(); this.isMultiRepoRequest = isMultiRepoRequest; - this.snapshots = snapshots; this.ignoreUnavailable = ignoreUnavailable; this.sortBy = sortBy; this.order = order; @@ -216,6 +215,7 @@ private class GetSnapshotsOperation { this.verbose = verbose; this.indices = indices; + this.snapshotNamePredicate = 
SnapshotNamePredicate.forSnapshots(ignoreUnavailable, snapshots); this.fromSortValuePredicates = SnapshotPredicates.forFromSortValue(fromSortValue, sortBy, order); this.slmPolicyPredicate = SlmPolicyPredicate.forPolicies(policies); @@ -282,113 +282,46 @@ private boolean skipRepository(String repositoryName) { } private void getSingleRepoSnapshotInfo(String repo, ActionListener listener) { - final Map allSnapshotIds = new HashMap<>(); - final List currentSnapshots = new ArrayList<>(); - for (final SnapshotInfo snapshotInfo : currentSnapshots(repo)) { - Snapshot snapshot = snapshotInfo.snapshot(); - allSnapshotIds.put(snapshot.getSnapshotId().getName(), snapshot); - currentSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); - } - final ListenableFuture repositoryDataListener = new ListenableFuture<>(); - if (isCurrentSnapshotsOnly()) { + if (snapshotNamePredicate == SnapshotNamePredicate.MATCH_CURRENT_ONLY) { repositoryDataListener.onResponse(null); } else { repositoriesService.getRepositoryData(repo, repositoryDataListener); } repositoryDataListener.addListener( - listener.delegateFailureAndWrap( - (l, repositoryData) -> loadSnapshotInfos(repo, allSnapshotIds, currentSnapshots, repositoryData, l) - ) + listener.delegateFailureAndWrap((l, repositoryData) -> loadSnapshotInfos(repo, repositoryData, l)) ); } - /** - * Returns a list of currently running snapshots from repository sorted by snapshot creation date - * - * @param repositoryName repository name - * @return list of snapshots - */ - private List currentSnapshots(String repositoryName) { - List snapshotList = new ArrayList<>(); - List entries = SnapshotsService.currentSnapshots( - snapshotsInProgress, - repositoryName, - Collections.emptyList() - ); - for (SnapshotsInProgress.Entry entry : entries) { - snapshotList.add(SnapshotInfo.inProgress(entry)); - } - return snapshotList; - } - - private void loadSnapshotInfos( - String repo, - Map allSnapshotIds, - List currentSnapshots, - @Nullable RepositoryData 
repositoryData, - ActionListener listener - ) { + private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryData, ActionListener listener) { if (cancellableTask.notifyIfCancelled(listener)) { return; } - if (repositoryData != null) { - for (SnapshotId snapshotId : repositoryData.getSnapshotIds()) { - if (matchesPredicates(snapshotId, repositoryData)) { - allSnapshotIds.put(snapshotId.getName(), new Snapshot(repo, snapshotId)); - } + final Set unmatchedRequiredNames = new HashSet<>(snapshotNamePredicate.requiredNames()); + final Set toResolve = new HashSet<>(); + + for (final var snapshotInProgress : snapshotsInProgress.forRepo(repo)) { + final var snapshotName = snapshotInProgress.snapshot().getSnapshotId().getName(); + unmatchedRequiredNames.remove(snapshotName); + if (snapshotNamePredicate.test(snapshotName, true)) { + toResolve.add(snapshotInProgress.snapshot()); } } - final Set toResolve = new HashSet<>(); - if (ResolvedRepositories.isMatchAll(snapshots)) { - toResolve.addAll(allSnapshotIds.values()); - } else { - final List includePatterns = new ArrayList<>(); - final List excludePatterns = new ArrayList<>(); - boolean hasCurrent = false; - boolean seenWildcard = false; - for (String snapshotOrPattern : snapshots) { - if (seenWildcard && snapshotOrPattern.length() > 1 && snapshotOrPattern.startsWith("-")) { - excludePatterns.add(snapshotOrPattern.substring(1)); - } else { - if (Regex.isSimpleMatchPattern(snapshotOrPattern)) { - seenWildcard = true; - includePatterns.add(snapshotOrPattern); - } else if (GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshotOrPattern)) { - hasCurrent = true; - seenWildcard = true; - } else { - if (ignoreUnavailable == false && allSnapshotIds.containsKey(snapshotOrPattern) == false) { - throw new SnapshotMissingException(repo, snapshotOrPattern); - } - includePatterns.add(snapshotOrPattern); - } - } - } - final String[] includes = includePatterns.toArray(Strings.EMPTY_ARRAY); - final String[] excludes 
= excludePatterns.toArray(Strings.EMPTY_ARRAY); - for (Map.Entry entry : allSnapshotIds.entrySet()) { - final Snapshot snapshot = entry.getValue(); - if (toResolve.contains(snapshot) == false - && Regex.simpleMatch(includes, entry.getKey()) - && Regex.simpleMatch(excludes, entry.getKey()) == false) { - toResolve.add(snapshot); - } - } - if (hasCurrent) { - for (SnapshotInfo snapshotInfo : currentSnapshots) { - final Snapshot snapshot = snapshotInfo.snapshot(); - if (Regex.simpleMatch(excludes, snapshot.getSnapshotId().getName()) == false) { - toResolve.add(snapshot); - } + if (repositoryData != null) { + for (final var snapshotId : repositoryData.getSnapshotIds()) { + final var snapshotName = snapshotId.getName(); + unmatchedRequiredNames.remove(snapshotName); + if (snapshotNamePredicate.test(snapshotName, false) && matchesPredicates(snapshotId, repositoryData)) { + toResolve.add(new Snapshot(repo, snapshotId)); } } - if (toResolve.isEmpty() && ignoreUnavailable == false && isCurrentSnapshotsOnly() == false) { - throw new SnapshotMissingException(repo, snapshots[0]); - } + } + + if (unmatchedRequiredNames.isEmpty() == false) { + throw new SnapshotMissingException(repo, unmatchedRequiredNames.iterator().next()); } if (verbose) { @@ -396,13 +329,18 @@ private void loadSnapshotInfos( } else { assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; + final var currentSnapshots = snapshotsInProgress.forRepo(repo) + .stream() + .map(entry -> SnapshotInfo.inProgress(entry).basic()) + .toList(); + final SnapshotsInRepo snapshotInfos; if (repositoryData != null) { // want non-current snapshots as well, which are found in the repository data snapshotInfos = buildSimpleSnapshotInfos(toResolve, repo, repositoryData, currentSnapshots); } else { // only want current snapshots - snapshotInfos = 
sortSnapshotsWithNoOffsetOrLimit(currentSnapshots.stream().map(SnapshotInfo::basic).toList()); + snapshotInfos = sortSnapshotsWithNoOffsetOrLimit(currentSnapshots); } listener.onResponse(snapshotInfos); } @@ -487,10 +425,6 @@ public void onFailure(Exception e) { } } - private boolean isCurrentSnapshotsOnly() { - return snapshots.length == 1 && GetSnapshotsRequest.CURRENT_SNAPSHOT.equalsIgnoreCase(snapshots[0]); - } - private SnapshotsInRepo buildSimpleSnapshotInfos( final Set toResolve, final String repoName, @@ -499,8 +433,9 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( ) { List snapshotInfos = new ArrayList<>(); for (SnapshotInfo snapshotInfo : currentSnapshots) { + assert snapshotInfo.startTime() == 0L && snapshotInfo.endTime() == 0L && snapshotInfo.totalShards() == 0L : snapshotInfo; if (toResolve.remove(snapshotInfo.snapshot())) { - snapshotInfos.add(snapshotInfo.basic()); + snapshotInfos.add(snapshotInfo); } } Map> snapshotsToIndices = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicateTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicateTests.java new file mode 100644 index 0000000000000..5190e5e5083fe --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/get/SnapshotNamePredicateTests.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.get; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Set; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; + +public class SnapshotNamePredicateTests extends ESTestCase { + + private static String allSnapshotsSelector() { + return randomFrom("_all", "_ALL", "_All", "_aLl"); + } + + public void testMatchAll() { + assertSame(SnapshotNamePredicate.MATCH_ALL, createPredicate(randomBoolean())); + assertSame(SnapshotNamePredicate.MATCH_ALL, createPredicate(randomBoolean(), "*")); + assertSame(SnapshotNamePredicate.MATCH_ALL, createPredicate(randomBoolean(), allSnapshotsSelector())); + assertTrue(SnapshotNamePredicate.MATCH_ALL.test(randomIdentifier(), randomBoolean())); + } + + private static String currentSnapshotsSelector() { + return randomFrom("_current", "_CURRENT", "_Current", "_cUrReNt"); + } + + public void testMatchCurrent() { + assertSame(SnapshotNamePredicate.MATCH_CURRENT_ONLY, createPredicate(randomBoolean(), currentSnapshotsSelector())); + assertTrue(SnapshotNamePredicate.MATCH_CURRENT_ONLY.test(randomIdentifier(), true)); + assertFalse(SnapshotNamePredicate.MATCH_CURRENT_ONLY.test(randomIdentifier(), false)); + } + + public void testMatchOneNameIgnoreUnavailable() { + final var requestName = randomIdentifier(); + final var predicates = createPredicate(true, requestName); + assertTrue(predicates.test(requestName, randomBoolean())); + assertFalse(predicates.test(randomValueOtherThan(requestName, ESTestCase::randomIdentifier), randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + } + + public void testMatchOneNameRequireAvailable() { + final var requestName = randomIdentifier(); + final var predicates = createPredicate(false, requestName); + assertTrue(predicates.test(requestName, randomBoolean())); + assertFalse(predicates.test(randomValueOtherThan(requestName, ESTestCase::randomIdentifier), randomBoolean())); + 
assertEquals(predicates.requiredNames(), Set.of(requestName)); + } + + public void testMatchWildcard() { + final var predicates = createPredicate(randomBoolean(), "include-*"); + assertTrue(predicates.test("include-" + randomIdentifier(), randomBoolean())); + assertFalse(predicates.test("exclude-" + randomIdentifier(), randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + } + + public void testMatchWildcardAndName() { + final var requestName = randomIdentifier(); + final var predicates = createPredicate(true, "include-*", requestName); + assertTrue(predicates.test("include-" + randomIdentifier(), randomBoolean())); + assertTrue(predicates.test(requestName, randomBoolean())); + assertFalse(predicates.test("exclude-" + randomIdentifier(), randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + + assertEquals(createPredicate(false, "include-*", requestName).requiredNames(), Set.of(requestName)); + } + + public void testIncludeWildcardExcludeName() { + final var requestName = randomIdentifier(); + final var predicates = createPredicate(randomBoolean(), "include-*", "-include-" + requestName); + assertTrue(predicates.test("include-" + randomValueOtherThan(requestName, ESTestCase::randomIdentifier), randomBoolean())); + assertFalse(predicates.test("include-" + requestName, randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + } + + public void testIncludeWildcardExcludeWildcard() { + final var predicates = createPredicate(randomBoolean(), "include-*", "-include-exclude-*"); + assertTrue(predicates.test("include-" + randomIdentifier(), randomBoolean())); + assertFalse(predicates.test("exclude-" + randomIdentifier(), randomBoolean())); + assertFalse(predicates.test("include-exclude-" + randomIdentifier(), randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + } + + public void testIncludeCurrentExcludeWildcard() { + final var predicates = createPredicate(randomBoolean(), currentSnapshotsSelector(), 
"-exclude-*"); + assertTrue(predicates.test(randomIdentifier(), true)); + assertFalse(predicates.test("exclude-" + randomIdentifier(), randomBoolean())); + assertFalse(predicates.test(randomIdentifier(), false)); + assertThat(predicates.requiredNames(), empty()); + } + + public void testIncludeCurrentAndWildcardExcludeName() { + final var requestName = randomIdentifier(); + final var predicates = createPredicate(randomBoolean(), currentSnapshotsSelector(), "include-*", "-include-" + requestName); + assertTrue(predicates.test(randomIdentifier(), true)); + assertTrue(predicates.test("include-" + randomValueOtherThan(requestName, ESTestCase::randomIdentifier), false)); + assertFalse(predicates.test("include-" + requestName, randomBoolean())); + assertThat(predicates.requiredNames(), empty()); + } + + public void testInitialExclude() { + // NB current behaviour, but could be considered a bug? + final var requestName = "-" + randomIdentifier(); + final var predicates = createPredicate(false, requestName); + assertTrue(predicates.test(requestName, randomBoolean())); + assertThat(predicates.requiredNames(), equalTo(Set.of(requestName))); + } + + public void testHyphen() { + // NB current behaviour, but could be considered a bug? + final var predicates = createPredicate(false, "include-*", "-"); + assertTrue(predicates.test("include-" + randomIdentifier(), randomBoolean())); + assertTrue(predicates.test("-", randomBoolean())); + assertThat(predicates.requiredNames(), equalTo(Set.of("-"))); + } + + public void testAllWithExclude() { + // NB current behaviour, but could be considered a bug? 
+ final var requestName = randomIdentifier(); + final var predicates = createPredicate(false, "_all", "-" + requestName); + assertFalse(predicates.test(randomIdentifier(), randomBoolean())); + assertTrue(predicates.test("_all", randomBoolean())); + assertTrue(predicates.test("-" + requestName, randomBoolean())); + assertThat(predicates.requiredNames(), equalTo(Set.of("_all", "-" + requestName))); + } + + private static SnapshotNamePredicate createPredicate(boolean ignoreUnavailable, String... requestSnapshots) { + return SnapshotNamePredicate.forSnapshots(ignoreUnavailable, requestSnapshots); + } +} From 963a5b876dfdc3c9c77785d4da5d060695b4258c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 19 Mar 2024 13:09:40 +0200 Subject: [PATCH 013/214] Refactor GetApiKeyResponse and QueryApiKeyResponse to return List instead of arrays (#106409) The plan is that GetApiKeyResponse and QueryApiKeyResponse will be receiving collections of api key info, optional profile uids, and optional sort values, that they will "zip" into a single list of records. Array vs List is not important in this context, but we use collections in the internal services and action handlers, so it is more consistent to also use them in the responses. 
--- .../action/apikey/GetApiKeyResponse.java | 68 +++++++--- .../action/apikey/QueryApiKeyResponse.java | 70 +++------- .../xpack/security/apikey/ApiKeyRestIT.java | 7 +- .../security/apikey/GetApiKeysRestIT.java | 36 ++--- .../security/authc/ApiKeyIntegTests.java | 123 ++++++++++-------- .../authc/apikey/ApiKeySingleNodeTests.java | 30 ++--- .../xpack/security/authc/ApiKeyService.java | 6 +- .../action/apikey/RestGetApiKeyAction.java | 2 +- .../security/authc/ApiKeyServiceTests.java | 34 ++--- .../apikey/RestGetApiKeyActionTests.java | 47 +++---- .../apikey/RestQueryApiKeyActionTests.java | 4 +- 11 files changed, 223 insertions(+), 204 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java index 6e484d5b04426..3cdee1faa3f11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyResponse.java @@ -29,25 +29,25 @@ */ public final class GetApiKeyResponse extends ActionResponse implements ToXContentObject { - private final ApiKey[] foundApiKeysInfo; + public static final GetApiKeyResponse EMPTY = new GetApiKeyResponse(List.of()); - public GetApiKeyResponse(Collection foundApiKeysInfo) { - Objects.requireNonNull(foundApiKeysInfo, "found_api_keys_info must be provided"); - this.foundApiKeysInfo = foundApiKeysInfo.toArray(new ApiKey[0]); - } + private final List foundApiKeyInfoList; - public static GetApiKeyResponse emptyResponse() { - return new GetApiKeyResponse(List.of()); + public GetApiKeyResponse(Collection foundApiKeyInfos) { + Objects.requireNonNull(foundApiKeyInfos, "found_api_keys_info must be provided"); + this.foundApiKeyInfoList = foundApiKeyInfos.stream().map(Item::new).toList(); } - public ApiKey[] 
getApiKeyInfos() { - return foundApiKeysInfo; + public List getApiKeyInfoList() { + return foundApiKeyInfoList; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().array("api_keys", (Object[]) foundApiKeysInfo); - return builder.endObject(); + builder.startObject(); + builder.field("api_keys", foundApiKeyInfoList); + builder.endObject(); + return builder; } @Override @@ -55,21 +55,49 @@ public void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); } - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("get_api_key_response", args -> { - return (args[0] == null) ? GetApiKeyResponse.emptyResponse() : new GetApiKeyResponse((List) args[0]); - }); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ApiKey.fromXContent(p), new ParseField("api_keys")); + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetApiKeyResponse that = (GetApiKeyResponse) o; + return Objects.equals(foundApiKeyInfoList, that.foundApiKeyInfoList); } - public static GetApiKeyResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); + @Override + public int hashCode() { + return Objects.hash(foundApiKeyInfoList); } @Override public String toString() { - return "GetApiKeyResponse [foundApiKeysInfo=" + foundApiKeysInfo + "]"; + return "GetApiKeyResponse{foundApiKeysInfo=" + foundApiKeyInfoList + "}"; } + public record Item(ApiKey apiKeyInfo) implements ToXContentObject { + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + apiKeyInfo.innerToXContent(builder, params); + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "Item{apiKeyInfo=" + apiKeyInfo + "}"; + } 
+ } + + @SuppressWarnings("unchecked") + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_api_key_response", + args -> (args[0] == null) ? GetApiKeyResponse.EMPTY : new GetApiKeyResponse((List) args[0]) + ); + static { + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ApiKey.fromXContent(p), new ParseField("api_keys")); + } + + public static GetApiKeyResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java index 9e510d48654b2..11b9163026322 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyResponse.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -27,31 +28,25 @@ */ public final class QueryApiKeyResponse extends ActionResponse implements ToXContentObject { + public static final QueryApiKeyResponse EMPTY = new QueryApiKeyResponse(0, List.of(), null); + private final long total; - private final Item[] items; + private final List foundApiKeyInfoList; private final @Nullable InternalAggregations aggregations; public QueryApiKeyResponse(long total, Collection items, @Nullable InternalAggregations aggregations) { this.total = total; Objects.requireNonNull(items, "items must be provided"); - this.items = items.toArray(new Item[0]); + this.foundApiKeyInfoList = items instanceof List ? 
(List) items : new ArrayList<>(items); this.aggregations = aggregations; } - public static QueryApiKeyResponse emptyResponse() { - return new QueryApiKeyResponse(0, List.of(), null); - } - public long getTotal() { return total; } - public Item[] getItems() { - return items; - } - - public int getCount() { - return items.length; + public List getApiKeyInfoList() { + return foundApiKeyInfoList; } public InternalAggregations getAggregations() { @@ -60,11 +55,13 @@ public InternalAggregations getAggregations() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject().field("total", total).field("count", items.length).array("api_keys", (Object[]) items); + builder.startObject(); + builder.field("total", total).field("count", foundApiKeyInfoList.size()).field("api_keys", foundApiKeyInfoList); if (aggregations != null) { aggregations.toXContent(builder, params); } - return builder.endObject(); + builder.endObject(); + return builder; } @Override @@ -77,44 +74,30 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; QueryApiKeyResponse that = (QueryApiKeyResponse) o; - return total == that.total && Arrays.equals(items, that.items) && Objects.equals(aggregations, that.aggregations); + return total == that.total + && Objects.equals(foundApiKeyInfoList, that.foundApiKeyInfoList) + && Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { int result = Objects.hash(total); - result = 31 * result + Arrays.hashCode(items); + result = 31 * result + Objects.hash(foundApiKeyInfoList); result = 31 * result + Objects.hash(aggregations); return result; } @Override public String toString() { - return "QueryApiKeyResponse{total=" + total + ", items=" + Arrays.toString(items) + ", aggs=" + aggregations + "}"; + return "QueryApiKeyResponse{total=" + total + ", items=" + foundApiKeyInfoList + ", aggs=" + aggregations + 
"}"; } - public static class Item implements ToXContentObject { - private final ApiKey apiKey; - @Nullable - private final Object[] sortValues; - - public Item(ApiKey apiKey, @Nullable Object[] sortValues) { - this.apiKey = apiKey; - this.sortValues = sortValues; - } - - public ApiKey getApiKey() { - return apiKey; - } - - public Object[] getSortValues() { - return sortValues; - } + public record Item(ApiKey apiKeyInfo, @Nullable Object[] sortValues) implements ToXContentObject { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - apiKey.innerToXContent(builder, params); + apiKeyInfo.innerToXContent(builder, params); if (sortValues != null && sortValues.length > 0) { builder.array("_sort", sortValues); } @@ -122,24 +105,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Item item = (Item) o; - return Objects.equals(apiKey, item.apiKey) && Arrays.equals(sortValues, item.sortValues); - } - - @Override - public int hashCode() { - int result = Objects.hash(apiKey); - result = 31 * result + Arrays.hashCode(sortValues); - return result; - } - @Override public String toString() { - return "Item{" + "apiKey=" + apiKey + ", sortValues=" + Arrays.toString(sortValues) + '}'; + return "Item{apiKeyInfo=" + apiKeyInfo + ", sortValues=" + Arrays.toString(sortValues) + '}'; } } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java index 850dfe5dffa99..08bca3ffdaeea 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java +++ 
b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java @@ -1824,9 +1824,12 @@ private void expectMetadata(final String apiKeyId, final Map exp assertOK(response); try (XContentParser parser = responseAsParser(response)) { final var apiKeyResponse = GetApiKeyResponse.fromXContent(parser); - assertThat(apiKeyResponse.getApiKeyInfos().length, equalTo(1)); + assertThat(apiKeyResponse.getApiKeyInfoList().size(), equalTo(1)); // ApiKey metadata is set to empty Map if null - assertThat(apiKeyResponse.getApiKeyInfos()[0].getMetadata(), equalTo(expectedMetadata == null ? Map.of() : expectedMetadata)); + assertThat( + apiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getMetadata(), + equalTo(expectedMetadata == null ? Map.of() : expectedMetadata) + ); } } diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java index e9dc00acf3211..a321a5a758bf3 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/apikey/GetApiKeysRestIT.java @@ -27,18 +27,17 @@ import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import javax.annotation.Nullable; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyArray; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.notNullValue; import static 
org.hamcrest.Matchers.nullValue; @@ -103,7 +102,7 @@ public void testGetApiKeysWithActiveOnlyFlag() throws Exception { // We get an empty result when no API keys active getSecurityClient().invalidateApiKeys(apiKeyId1); - assertThat(getApiKeysWithRequestParams(Map.of("active_only", "true")).getApiKeyInfos(), emptyArray()); + assertThat(getApiKeysWithRequestParams(Map.of("active_only", "true")).getApiKeyInfoList(), emptyIterable()); { // Using together with id parameter, returns 404 for inactive key @@ -166,12 +165,12 @@ public void testGetApiKeysWithActiveOnlyFlagAndMultipleUsers() throws Exception manageApiKeyUserApiKeyId ); assertThat( - getApiKeysWithRequestParams(Map.of("active_only", "true", "username", MANAGE_OWN_API_KEY_USER)).getApiKeyInfos(), - emptyArray() + getApiKeysWithRequestParams(Map.of("active_only", "true", "username", MANAGE_OWN_API_KEY_USER)).getApiKeyInfoList(), + emptyIterable() ); assertThat( - getApiKeysWithRequestParams(MANAGE_OWN_API_KEY_USER, Map.of("active_only", "true", "owner", "true")).getApiKeyInfos(), - emptyArray() + getApiKeysWithRequestParams(MANAGE_OWN_API_KEY_USER, Map.of("active_only", "true", "owner", "true")).getApiKeyInfoList(), + emptyIterable() ); // No more active API keys @@ -180,15 +179,15 @@ public void testGetApiKeysWithActiveOnlyFlagAndMultipleUsers() throws Exception assertThat( getApiKeysWithRequestParams( Map.of("active_only", "true", "username", randomFrom(MANAGE_SECURITY_USER, MANAGE_OWN_API_KEY_USER)) - ).getApiKeyInfos(), - emptyArray() + ).getApiKeyInfoList(), + emptyIterable() ); assertThat( getApiKeysWithRequestParams( randomFrom(MANAGE_SECURITY_USER, MANAGE_OWN_API_KEY_USER), Map.of("active_only", "true", "owner", "true") - ).getApiKeyInfos(), - emptyArray() + ).getApiKeyInfoList(), + emptyIterable() ); // With flag set to false, we get both inactive keys assertResponseContainsApiKeyIds( @@ -205,8 +204,8 @@ public void testInvalidateApiKey() throws Exception { setUserForRequest(request, 
MANAGE_SECURITY_USER); GetApiKeyResponse getApiKeyResponse = GetApiKeyResponse.fromXContent(getParser(client().performRequest(request))); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - ApiKey apiKey = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey apiKey = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKey.isInvalidated(), equalTo(false)); assertThat(apiKey.getInvalidation(), nullValue()); assertThat(apiKey.getId(), equalTo(apiKeyId0)); @@ -226,8 +225,8 @@ public void testInvalidateApiKey() throws Exception { setUserForRequest(request, MANAGE_SECURITY_USER); getApiKeyResponse = GetApiKeyResponse.fromXContent(getParser(client().performRequest(request))); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - apiKey = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + apiKey = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKey.isInvalidated(), equalTo(true)); assertThat(apiKey.getInvalidation(), notNullValue()); assertThat(apiKey.getId(), equalTo(apiKeyId0)); @@ -245,7 +244,10 @@ private GetApiKeyResponse getApiKeysWithRequestParams(String userOnRequest, Map< } private static void assertResponseContainsApiKeyIds(GetApiKeyResponse response, String... 
ids) { - assertThat(Arrays.stream(response.getApiKeyInfos()).map(ApiKey::getId).collect(Collectors.toList()), containsInAnyOrder(ids)); + assertThat( + response.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).map(ApiKey::getId).toList(), + containsInAnyOrder(ids) + ); } private static XContentParser getParser(Response response) throws IOException { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 5972d2b2c070f..f00b0b41c2fa7 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -137,7 +137,6 @@ import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -149,6 +148,7 @@ import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -399,7 +399,7 @@ public void testInvalidateApiKeysForApiKeyName() throws InterruptedException, Ex public void testInvalidateApiKeyWillClearApiKeyCache() throws IOException, ExecutionException, InterruptedException { final List services = Arrays.stream(internalCluster().getNodeNames()) .map(n -> internalCluster().getInstance(ApiKeyService.class, 
n)) - .collect(Collectors.toList()); + .toList(); // Create two API keys and authenticate with them Tuple apiKey1 = createApiKeyAndAuthenticateWithIt(); @@ -471,7 +471,7 @@ public void testDynamicDeletionInterval() throws Exception { refreshSecurityIndex(); // Get API keys to make sure remover didn't remove any yet - assertThat(getAllApiKeyInfo(client, false).length, equalTo(3)); + assertThat(getAllApiKeyInfo(client, false).size(), equalTo(3)); // Invalidate another key listener = new PlainActionFuture<>(); @@ -481,7 +481,7 @@ public void testDynamicDeletionInterval() throws Exception { refreshSecurityIndex(); // Get API keys to make sure remover didn't remove any yet (shouldn't be removed because of the long DELETE_INTERVAL) - assertThat(getAllApiKeyInfo(client, false).length, equalTo(3)); + assertThat(getAllApiKeyInfo(client, false).size(), equalTo(3)); // Update DELETE_INTERVAL to every 0 ms builder = Settings.builder(); @@ -499,7 +499,7 @@ public void testDynamicDeletionInterval() throws Exception { // Make sure all keys except the last invalidated one are deleted // There is a (tiny) risk that the remover runs after the invalidation and therefore deletes the key that was just // invalidated, so 0 or 1 keys can be returned from the get api - assertThat(getAllApiKeyInfo(client, false).length, in(Set.of(0, 1))); + assertThat(getAllApiKeyInfo(client, false).size(), in(Set.of(0, 1))); } finally { final Settings.Builder builder = Settings.builder(); builder.putNull(ApiKeyService.DELETE_INTERVAL.getKey()); @@ -516,7 +516,7 @@ private void verifyInvalidateResponse( assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(noOfApiKeys)); assertThat( invalidateResponse.getInvalidatedApiKeys(), - containsInAnyOrder(responses.stream().map(r -> r.getId()).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY)) + containsInAnyOrder(responses.stream().map(CreateApiKeyResponse::getId).toArray(String[]::new)) ); 
assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0)); assertThat(invalidateResponse.getErrors().size(), equalTo(0)); @@ -588,7 +588,11 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr // The first API key with 1ms expiration should already be deleted Set expectedKeyIds = Sets.newHashSet(nonExpiringKey.getId(), createdApiKeys.get(0).getId(), createdApiKeys.get(1).getId()); boolean apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false; - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(nonExpiringKey.getId())) { assertThat(apiKey.isInvalidated(), is(false)); @@ -603,7 +607,7 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr } } assertThat( - getApiKeyResponseListener.get().getApiKeyInfos().length, + getApiKeyResponseListener.get().getApiKeyInfoList().size(), is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 
3 : 2) ); @@ -633,7 +637,11 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr ); expectedKeyIds = Sets.newHashSet(nonExpiringKey.getId(), createdApiKeys.get(1).getId()); apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false; - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(nonExpiringKey.getId())) { assertThat(apiKey.isInvalidated(), is(false)); @@ -645,7 +653,7 @@ private void doTestInvalidKeysImmediatelyDeletedByRemover(String namePrefix) thr } } assertThat( - getApiKeyResponseListener.get().getApiKeyInfos().length, + getApiKeyResponseListener.get().getApiKeyInfoList().size(), is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 2 : 1) ); } @@ -684,7 +692,7 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP GetApiKeyRequest.builder().apiKeyName(namePrefix + "*").build(), getApiKeyResponseListener ); - assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length, is(noOfKeys)); + assertThat(getApiKeyResponseListener.get().getApiKeyInfoList().size(), is(noOfKeys)); // Expire the 1st key such that it cannot be deleted by the remover // hack doc to modify the expiration time @@ -783,7 +791,11 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP createdApiKeys.get(7).getId(), createdApiKeys.get(8).getId() ); - for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) { + for (ApiKey apiKey : getApiKeyResponseListener.get() + .getApiKeyInfoList() + .stream() + .map(GetApiKeyResponse.Item::apiKeyInfo) + .toList()) { assertThat(apiKey.getId(), is(in(expectedKeyIds))); if (apiKey.getId().equals(createdApiKeys.get(0).getId())) { // has been expired, not invalidated @@ -805,7 +817,7 @@ 
private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP fail("unexpected API key " + apiKey); } } - assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length, is(4)); + assertThat(getApiKeyResponseListener.get().getApiKeyInfoList().size(), is(4)); } private void refreshSecurityIndex() throws Exception { @@ -842,7 +854,7 @@ public void testActiveApiKeysWithNoExpirationNeverGetDeletedByRemover() throws E tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), Collections.singletonList(responses.get(1).getId()) ); @@ -888,7 +900,7 @@ public void testGetApiKeysForRealm() throws InterruptedException, ExecutionExcep tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), expectedValidKeyIds, invalidatedApiKeyIds ); @@ -913,7 +925,7 @@ public void testGetApiKeysForUser() throws Exception { tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), responses.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -937,7 +949,7 @@ public void testGetApiKeysForRealmAndUser() throws InterruptedException, Executi tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -961,7 +973,7 @@ public void testGetApiKeysForApiKeyId() throws InterruptedException, ExecutionEx tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? 
List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1003,7 +1015,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution metadatas, List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener.get().getApiKeyInfos(), + listener.get().getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1020,7 +1032,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution tuple1.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener2.get().getApiKeyInfos(), + listener2.get().getApiKeyInfoList(), createApiKeyResponses1.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1043,7 +1055,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution metadatas, List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener3.get().getApiKeyInfos(), + listener3.get().getApiKeyInfoList(), responses.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1060,7 +1072,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution null, List.of(), List.of(), - listener4.get().getApiKeyInfos(), + listener4.get().getApiKeyInfoList(), Collections.emptySet(), null ); @@ -1077,7 +1089,7 @@ public void testGetApiKeysForApiKeyName() throws InterruptedException, Execution tuple2.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), withLimitedBy ? 
List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR) : null, - listener5.get().getApiKeyInfos(), + listener5.get().getApiKeyInfoList(), createApiKeyResponses2.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toSet()), null ); @@ -1121,7 +1133,6 @@ public void testGetApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedEx GetApiKeyRequest.builder().ownedByAuthenticatedUser().withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( userWithManageApiKeyRole, noOfApiKeysForUserWithManageApiKeyRole, @@ -1129,7 +1140,7 @@ public void testGetApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedEx tuple.v2(), List.of(DEFAULT_API_KEY_ROLE_DESCRIPTOR), expectedLimitedByRoleDescriptors, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1155,7 +1166,6 @@ public void testGetApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws ExecutionExce GetApiKeyRequest.builder().ownedByAuthenticatedUser().withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( "user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, @@ -1165,7 +1175,7 @@ public void testGetApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws ExecutionExce withLimitedBy ? 
List.of(new RoleDescriptor("manage_own_api_key_role", new String[] { "manage_own_api_key" }, null, null)) : null, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1191,7 +1201,6 @@ public void testGetApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws Exec GetApiKeyRequest.builder().realmName("file").userName("user_with_manage_own_api_key_role").withLimitedBy(withLimitedBy).build(), listener ); - GetApiKeyResponse response = listener.get(); verifyApiKeyInfos( "user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, @@ -1201,7 +1210,7 @@ public void testGetApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws Exec withLimitedBy ? List.of(new RoleDescriptor("manage_own_api_key_role", new String[] { "manage_own_api_key" }, null, null)) : null, - response.getApiKeyInfos(), + listener.get().getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null ); @@ -1451,7 +1460,7 @@ public void testApiKeyAuthorizationApiKeyMustBeAbleToRetrieveItsOwnInformationBu tuple.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), Strings.EMPTY_ARRAY, null, null)), null, - response.getApiKeyInfos(), + response.getApiKeyInfoList(), Collections.singleton(responses.get(0).getId()), null ); @@ -1503,12 +1512,13 @@ public void testApiKeyViewLimitedBy() { // Can view itself without limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses1, tuple1.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_own_api_key" }, null, null)), null, - new ApiKey[] { getApiKeyInfo(client1, apiKeyId1, false, randomBoolean()) }, + List.of(getApiKeyInfo(client1, apiKeyId1, false, randomBoolean())), 
Collections.singleton(apiKeyId1), null ); @@ -1538,24 +1548,26 @@ public void testApiKeyViewLimitedBy() { // View its own limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses3, tuple3.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_api_key" }, null, null)), List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR), - new ApiKey[] { getApiKeyInfo(client3, apiKeyId3, true, randomBoolean()) }, + List.of(getApiKeyInfo(client3, apiKeyId3, true, randomBoolean())), Collections.singleton(apiKeyId3), null ); // View other key's limited-by verifyApiKeyInfos( + ES_TEST_ROOT_USER, 1, responses1, tuple1.v2(), List.of(new RoleDescriptor(DEFAULT_API_KEY_ROLE_DESCRIPTOR.getName(), new String[] { "manage_own_api_key" }, null, null)), List.of(ES_TEST_ROOT_ROLE_DESCRIPTOR), - new ApiKey[] { getApiKeyInfo(client3, apiKeyId1, true, randomBoolean()) }, + List.of(getApiKeyInfo(client3, apiKeyId1, true, randomBoolean())), Collections.singleton(apiKeyId1), null ); @@ -1721,8 +1733,8 @@ public void testDerivedKeys() throws ExecutionException, InterruptedException { GetApiKeyRequest.builder().apiKeyId(key100Response.getId()).withLimitedBy().build(), future ); - assertThat(future.actionGet().getApiKeyInfos().length, equalTo(1)); - final RoleDescriptorsIntersection limitedBy = future.actionGet().getApiKeyInfos()[0].getLimitedBy(); + assertThat(future.actionGet().getApiKeyInfoList().size(), equalTo(1)); + RoleDescriptorsIntersection limitedBy = future.actionGet().getApiKeyInfoList().get(0).apiKeyInfo().getLimitedBy(); assertThat(limitedBy.roleDescriptorsList().size(), equalTo(1)); assertThat(limitedBy.roleDescriptorsList().iterator().next(), emptyIterable()); @@ -1761,8 +1773,8 @@ public void testApiKeyRunAsAnotherUserCanCreateApiKey() { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(response2.getId()).ownedByAuthenticatedUser(true).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey 
apiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey apiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(apiKeyInfo.getId(), equalTo(response2.getId())); assertThat(apiKeyInfo.getUsername(), equalTo(ES_TEST_ROOT_USER)); assertThat(apiKeyInfo.getRealm(), equalTo("file")); @@ -2885,8 +2897,8 @@ private ApiKey getApiKeyInfo(Client client, String apiKeyId, boolean withLimited future ); final GetApiKeyResponse getApiKeyResponse = future.actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - return getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + return getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); } else { final PlainActionFuture future = new PlainActionFuture<>(); client.execute( @@ -2895,17 +2907,17 @@ private ApiKey getApiKeyInfo(Client client, String apiKeyId, boolean withLimited future ); final QueryApiKeyResponse queryApiKeyResponse = future.actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - return queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + return queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); } } - private ApiKey[] getAllApiKeyInfo(Client client, boolean withLimitedBy) { + private List getAllApiKeyInfo(Client client, boolean withLimitedBy) { if (randomBoolean()) { final PlainActionFuture future = new PlainActionFuture<>(); client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().withLimitedBy(withLimitedBy).build(), future); final GetApiKeyResponse getApiKeyResponse = future.actionGet(); - return getApiKeyResponse.getApiKeyInfos(); + return getApiKeyResponse.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(); } else { final PlainActionFuture future = new PlainActionFuture<>(); 
client.execute( @@ -2914,7 +2926,7 @@ private ApiKey[] getAllApiKeyInfo(Client client, boolean withLimitedBy) { future ); final QueryApiKeyResponse queryApiKeyResponse = future.actionGet(); - return Arrays.stream(queryApiKeyResponse.getItems()).map(QueryApiKeyResponse.Item::getApiKey).toArray(ApiKey[]::new); + return queryApiKeyResponse.getApiKeyInfoList().stream().map(QueryApiKeyResponse.Item::apiKeyInfo).toList(); } } @@ -2967,7 +2979,8 @@ private void assertApiKeyNotCreated(Client client, String keyName) throws Execut 0, client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyName(keyName).ownedByAuthenticatedUser(false).build()) .get() - .getApiKeyInfos().length + .getApiKeyInfoList() + .size() ); } @@ -2977,7 +2990,7 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, List expectedLimitedByRoleDescriptors, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { @@ -2988,7 +3001,7 @@ private void verifyApiKeyInfos( metadatas, expectedRoleDescriptors, expectedLimitedByRoleDescriptors, - apiKeyInfos, + apiKeyInfos.stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), validApiKeyIds, invalidatedApiKeyIds ); @@ -3001,7 +3014,7 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, List expectedLimitedByRoleDescriptors, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { @@ -3025,16 +3038,16 @@ private void verifyApiKeyInfos( List> metadatas, List expectedRoleDescriptors, Function> expectedLimitedByRoleDescriptorsLookup, - ApiKey[] apiKeyInfos, + List apiKeyInfos, Set validApiKeyIds, List invalidatedApiKeyIds ) { - assertThat(apiKeyInfos.length, equalTo(expectedNumberOfApiKeys)); + assertThat(apiKeyInfos.size(), equalTo(expectedNumberOfApiKeys)); List expectedIds = responses.stream() .filter(o -> validApiKeyIds.contains(o.getId())) .map(o -> o.getId()) .collect(Collectors.toList()); - List actualIds = 
Arrays.stream(apiKeyInfos) + List actualIds = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getId()) .collect(Collectors.toList()); @@ -3043,19 +3056,19 @@ private void verifyApiKeyInfos( .filter(o -> validApiKeyIds.contains(o.getId())) .map(o -> o.getName()) .collect(Collectors.toList()); - List actualNames = Arrays.stream(apiKeyInfos) + List actualNames = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getName()) .collect(Collectors.toList()); assertThat(actualNames, containsInAnyOrder(expectedNames.toArray(Strings.EMPTY_ARRAY))); Set expectedUsernames = (validApiKeyIds.isEmpty()) ? Collections.emptySet() : Set.of(user); - Set actualUsernames = Arrays.stream(apiKeyInfos) + Set actualUsernames = apiKeyInfos.stream() .filter(o -> o.isInvalidated() == false) .map(o -> o.getUsername()) .collect(Collectors.toSet()); assertThat(actualUsernames, containsInAnyOrder(expectedUsernames.toArray(Strings.EMPTY_ARRAY))); if (invalidatedApiKeyIds != null) { - List actualInvalidatedApiKeyIds = Arrays.stream(apiKeyInfos) + List actualInvalidatedApiKeyIds = apiKeyInfos.stream() .filter(o -> o.isInvalidated()) .map(o -> o.getId()) .collect(Collectors.toList()); @@ -3073,7 +3086,7 @@ private void verifyApiKeyInfos( assertThat(apiKey.getMetadata(), equalTo(metadata == null ? 
Map.of() : metadata)); } } - Arrays.stream(apiKeyInfos).forEach(apiKeyInfo -> { + apiKeyInfos.stream().forEach(apiKeyInfo -> { assertThat(apiKeyInfo.getRoleDescriptors(), containsInAnyOrder(expectedRoleDescriptors.toArray(RoleDescriptor[]::new))); final List expectedLimitedByRoleDescriptors = expectedLimitedByRoleDescriptorsLookup.apply( apiKeyInfo.getUsername() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index 707e7b2846a9b..aceafaa0bfc66 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -92,7 +92,6 @@ import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; -import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; @@ -101,6 +100,7 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -148,10 +148,10 @@ public void testQueryWithExpiredKeys() throws InterruptedException { .filter(QueryBuilders.rangeQuery("expiration").from(Instant.now().toEpochMilli())) ); final QueryApiKeyResponse queryApiKeyResponse = 
client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems().length, equalTo(1)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getId(), equalTo(id2)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getName(), equalTo("long-lived")); - assertThat(queryApiKeyResponse.getItems()[0].getSortValues(), emptyArray()); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getId(), equalTo(id2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getName(), equalTo("long-lived")); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).sortValues(), emptyArray()); } public void testCreatingApiKeyWithNoAccess() { @@ -286,8 +286,8 @@ public void testGetApiKeyWorksForTheApiKeyItself() { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(apiKeyId).ownedByAuthenticatedUser(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos().length, equalTo(1)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getId(), equalTo(apiKeyId)); + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getId(), is(apiKeyId)); // Cannot get any other keys final ElasticsearchSecurityException e = expectThrows( @@ -613,8 +613,8 @@ public void testCreateCrossClusterApiKey() throws IOException { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(apiKeyId).withLimitedBy(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(getApiKeyInfo.getType(), 
is(ApiKey.Type.CROSS_CLUSTER)); assertThat(getApiKeyInfo.getRoleDescriptors(), contains(expectedRoleDescriptor)); assertThat(getApiKeyInfo.getLimitedBy(), nullValue()); @@ -634,8 +634,8 @@ public void testCreateCrossClusterApiKey() throws IOException { randomBoolean() ); final QueryApiKeyResponse queryApiKeyResponse = client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - final ApiKey queryApiKeyInfo = queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey queryApiKeyInfo = queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(queryApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat(queryApiKeyInfo.getRoleDescriptors(), contains(expectedRoleDescriptor)); assertThat(queryApiKeyInfo.getLimitedBy(), nullValue()); @@ -669,8 +669,8 @@ public void testUpdateCrossClusterApiKey() throws IOException { GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyId(apiKeyId).withLimitedBy(randomBoolean()).build() ).actionGet(); - assertThat(getApiKeyResponse.getApiKeyInfos(), arrayWithSize(1)); - final ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfos()[0]; + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + ApiKey getApiKeyInfo = getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(getApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat(getApiKeyInfo.getRoleDescriptors(), contains(originalRoleDescriptor)); assertThat(getApiKeyInfo.getLimitedBy(), nullValue()); @@ -740,8 +740,8 @@ public void testUpdateCrossClusterApiKey() throws IOException { randomBoolean() ); final QueryApiKeyResponse queryApiKeyResponse = client().execute(QueryApiKeyAction.INSTANCE, queryApiKeyRequest).actionGet(); - assertThat(queryApiKeyResponse.getItems(), arrayWithSize(1)); - final ApiKey queryApiKeyInfo = 
queryApiKeyResponse.getItems()[0].getApiKey(); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(1)); + final ApiKey queryApiKeyInfo = queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo(); assertThat(queryApiKeyInfo.getType(), is(ApiKey.Type.CROSS_CLUSTER)); assertThat( queryApiKeyInfo.getRoleDescriptors(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index fea0c812e7e42..799d38377af5a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -1927,7 +1927,7 @@ public void getApiKeys( Arrays.toString(apiKeyIds), activeOnly ); - listener.onResponse(GetApiKeyResponse.emptyResponse()); + listener.onResponse(GetApiKeyResponse.EMPTY); } else { listener.onResponse(new GetApiKeyResponse(apiKeyInfos)); } @@ -1940,7 +1940,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); if (frozenSecurityIndex.indexExists() == false) { logger.debug("security index does not exist"); - listener.onResponse(QueryApiKeyResponse.emptyResponse()); + listener.onResponse(QueryApiKeyResponse.EMPTY); } else if (frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); } else { @@ -1955,7 +1955,7 @@ public void queryApiKeys(SearchRequest searchRequest, boolean withLimitedBy, Act final long total = searchResponse.getHits().getTotalHits().value; if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); - listener.onResponse(QueryApiKeyResponse.emptyResponse()); + listener.onResponse(QueryApiKeyResponse.EMPTY); return; } final List 
apiKeyItem = Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java index cd751740dd0fb..b192c836434c2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyAction.java @@ -66,7 +66,7 @@ public RestResponse buildResponse(GetApiKeyResponse getApiKeyResponse, XContentB getApiKeyResponse.toXContent(builder, channel.request()); // return HTTP status 404 if no API key found for API key id - if (Strings.hasText(apiKeyId) && getApiKeyResponse.getApiKeyInfos().length == 0) { + if (Strings.hasText(apiKeyId) && getApiKeyResponse.getApiKeyInfoList().isEmpty()) { return new RestResponse(RestStatus.NOT_FOUND, builder); } return new RestResponse(RestStatus.OK, builder); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index df454ddffe96f..342eff842b05e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -181,6 +181,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -326,7 +327,7 @@ public void testGetApiKeys() throws Exception { 
verify(searchRequestBuilder).setFetchSource(eq(true)); assertThat(searchRequest.get().source().query(), is(boolQuery)); GetApiKeyResponse getApiKeyResponse = getApiKeyResponsePlainActionFuture.get(); - assertThat(getApiKeyResponse.getApiKeyInfos(), emptyArray()); + assertThat(getApiKeyResponse.getApiKeyInfoList(), emptyIterable()); } @SuppressWarnings("unchecked") @@ -407,28 +408,31 @@ public void testApiKeysOwnerRealmIdentifier() throws Exception { getApiKeyResponsePlainActionFuture ); GetApiKeyResponse getApiKeyResponse = getApiKeyResponsePlainActionFuture.get(); - assertThat(getApiKeyResponse.getApiKeyInfos().length, is(2)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealm(), is(realm1)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealmType(), is(realm1Type)); - assertThat(getApiKeyResponse.getApiKeyInfos()[0].getRealmIdentifier(), is(new RealmConfig.RealmIdentifier(realm1Type, realm1))); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealm(), is(realm2)); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealmType(), nullValue()); - assertThat(getApiKeyResponse.getApiKeyInfos()[1].getRealmIdentifier(), nullValue()); + assertThat(getApiKeyResponse.getApiKeyInfoList(), iterableWithSize(2)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealm(), is(realm1)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmType(), is(realm1Type)); + assertThat( + getApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmIdentifier(), + is(new RealmConfig.RealmIdentifier(realm1Type, realm1)) + ); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealm(), is(realm2)); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmType(), nullValue()); + assertThat(getApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmIdentifier(), nullValue()); } { PlainActionFuture queryApiKeyResponsePlainActionFuture = new PlainActionFuture<>(); 
service.queryApiKeys(new SearchRequest(".security"), false, queryApiKeyResponsePlainActionFuture); QueryApiKeyResponse queryApiKeyResponse = queryApiKeyResponsePlainActionFuture.get(); - assertThat(queryApiKeyResponse.getItems().length, is(2)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getRealm(), is(realm1)); - assertThat(queryApiKeyResponse.getItems()[0].getApiKey().getRealmType(), is(realm1Type)); + assertThat(queryApiKeyResponse.getApiKeyInfoList(), iterableWithSize(2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealm(), is(realm1)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmType(), is(realm1Type)); assertThat( - queryApiKeyResponse.getItems()[0].getApiKey().getRealmIdentifier(), + queryApiKeyResponse.getApiKeyInfoList().get(0).apiKeyInfo().getRealmIdentifier(), is(new RealmConfig.RealmIdentifier(realm1Type, realm1)) ); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealm(), is(realm2)); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealmType(), nullValue()); - assertThat(queryApiKeyResponse.getItems()[1].getApiKey().getRealmIdentifier(), nullValue()); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealm(), is(realm2)); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmType(), nullValue()); + assertThat(queryApiKeyResponse.getApiKeyInfoList().get(1).apiKeyInfo().getRealmIdentifier(), nullValue()); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index 76a01f100b8ad..16afd3716b113 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -44,7 +44,8 @@ import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomCrossClusterAccessRoleDescriptor; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; -import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -144,7 +145,7 @@ public void doE || getApiKeyRequest.getRealmName() != null && getApiKeyRequest.getRealmName().equals("realm-1") || getApiKeyRequest.getUserName() != null && getApiKeyRequest.getUserName().equals("user-x")) { if (replyEmptyResponse) { - listener.onResponse((Response) GetApiKeyResponse.emptyResponse()); + listener.onResponse((Response) GetApiKeyResponse.EMPTY); } else { listener.onResponse((Response) getApiKeyResponseExpected); } @@ -162,25 +163,27 @@ public void doE assertThat(restResponse.status(), (replyEmptyResponse && params.get("id") != null) ? 
is(RestStatus.NOT_FOUND) : is(RestStatus.OK)); final GetApiKeyResponse actual = GetApiKeyResponse.fromXContent(createParser(XContentType.JSON.xContent(), restResponse.content())); if (replyEmptyResponse) { - assertThat(actual.getApiKeyInfos().length, is(0)); + assertThat(actual.getApiKeyInfoList(), emptyIterable()); } else { assertThat( - actual.getApiKeyInfos(), - arrayContaining( - new ApiKey( - "api-key-name-1", - "api-key-id-1", - type, - creation, - expiration, - false, - null, - "user-x", - "realm-1", - "realm-type-1", - metadata, - roleDescriptors, - limitedByRoleDescriptors + actual.getApiKeyInfoList(), + contains( + new GetApiKeyResponse.Item( + new ApiKey( + "api-key-name-1", + "api-key-id-1", + type, + creation, + expiration, + false, + null, + "user-x", + "realm-1", + "realm-type-1", + metadata, + roleDescriptors, + limitedByRoleDescriptors + ) ) ) ); @@ -286,11 +289,9 @@ public void doE assertThat(restResponse.status(), is(RestStatus.OK)); final GetApiKeyResponse actual = GetApiKeyResponse.fromXContent(createParser(XContentType.JSON.xContent(), restResponse.content())); if (isGetRequestForOwnedKeysOnly) { - assertThat(actual.getApiKeyInfos().length, is(1)); - assertThat(actual.getApiKeyInfos(), arrayContaining(apiKey1)); + assertThat(actual.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), contains(apiKey1)); } else { - assertThat(actual.getApiKeyInfos().length, is(2)); - assertThat(actual.getApiKeyInfos(), arrayContaining(apiKey1, apiKey2)); + assertThat(actual.getApiKeyInfoList().stream().map(GetApiKeyResponse.Item::apiKeyInfo).toList(), contains(apiKey1, apiKey2)); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 2bb5db03f210a..1373be0b372b6 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -126,7 +126,7 @@ public void doE final QueryBuilder shouldQueryBuilder = boolQueryBuilder.should().get(0); assertThat(shouldQueryBuilder.getClass(), is(PrefixQueryBuilder.class)); assertThat(((PrefixQueryBuilder) shouldQueryBuilder).fieldName(), equalTo("metadata.environ")); - listener.onResponse((Response) QueryApiKeyResponse.emptyResponse()); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); } }; final RestQueryApiKeyAction restQueryApiKeyAction = new RestQueryApiKeyAction(Settings.EMPTY, mockLicenseState); @@ -190,7 +190,7 @@ public void doE equalTo(new SearchAfterBuilder().setSortValues(new String[] { "key-2048", "2021-07-01T00:00:59.000Z" })) ); - listener.onResponse((Response) QueryApiKeyResponse.emptyResponse()); + listener.onResponse((Response) QueryApiKeyResponse.EMPTY); } }; From 1fbb085bd061682e56688bc6cddbb93963373ff6 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 19 Mar 2024 13:02:34 +0100 Subject: [PATCH 014/214] ESQL: Rename `SHOW FUNCTIONS` to `META FUNCTIONS` (#106362) This renames the `show functions` command to `meta functions`. It also removes its documentation and usage tracking. 
--- .../esql/source-commands/show.asciidoc | 11 +- docs/reference/rest-api/usage.asciidoc | 3 +- .../rest/generative/EsqlQueryGenerator.java | 6 +- .../src/main/resources/meta.csv-spec | 223 +++ .../src/main/resources/show.csv-spec | 228 --- .../xpack/esql/action/EsqlActionIT.java | 4 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 24 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 287 +-- .../esql/src/main/antlr/EsqlBaseParser.g4 | 6 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 287 +-- .../function/EsqlFunctionRegistry.java | 6 +- .../function/scalar/package-info.java | 2 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 20 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1562 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 13 +- .../xpack/esql/parser/EsqlBaseParser.java | 1360 +++++++------- .../parser/EsqlBaseParserBaseListener.java | 4 +- .../parser/EsqlBaseParserBaseVisitor.java | 2 +- .../esql/parser/EsqlBaseParserListener.java | 12 +- .../esql/parser/EsqlBaseParserVisitor.java | 6 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 6 +- .../MetaFunctions.java} | 6 +- .../xpack/esql/planner/Mapper.java | 6 +- .../xpack/esql/stats/FeatureMetric.java | 7 +- .../esql/parser/StatementParserTests.java | 3 +- .../esql/stats/VerifierMetricsTests.java | 45 +- .../rest-api-spec/test/esql/60_usage.yml | 7 +- 27 files changed, 2151 insertions(+), 1995 deletions(-) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/{show/ShowFunctions.java => meta/MetaFunctions.java} (96%) diff --git a/docs/reference/esql/source-commands/show.asciidoc b/docs/reference/esql/source-commands/show.asciidoc index 1913c60660b93..298ea5d8f92b9 100644 --- a/docs/reference/esql/source-commands/show.asciidoc +++ b/docs/reference/esql/source-commands/show.asciidoc @@ -12,7 +12,7 @@ SHOW item *Parameters* `item`:: -Can be `INFO` or experimental:[] `FUNCTIONS`. 
+Can only be `INFO`. *Description* @@ -20,16 +20,15 @@ The `SHOW` source command returns information about the deployment and its capabilities: * Use `SHOW INFO` to return the deployment's version, build date and hash. -* Use experimental:[] `SHOW FUNCTIONS` to return a list of all supported functions and a -synopsis of each function. *Examples* -[source.merge.styled,esql] +[source,esql] ---- -include::{esql-specs}/show.csv-spec[tag=showFunctionsFiltered] +SHOW INFO ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/show.csv-spec[tag=showFunctionsFiltered-result] + version | date | hash +8.13.0 |2024-02-23T10:04:18.123117961Z|04ba8c8db2507501c88f215e475de7b0798cb3b3 |=== diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 6bf7f2139680b..bbbd73a0523f3 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -228,7 +228,8 @@ GET /_xpack/usage "keep" : 0, "enrich" : 0, "from" : 0, - "row" : 0 + "row" : 0, + "meta" : 0 }, "queries" : { "rest" : { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java index 25530e3d744ad..fc5242714b2cc 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -30,7 +30,7 @@ public record QueryExecuted(String query, int depth, List outputSchema, public static String sourceCommand(List availabeIndices) { return switch (randomIntBetween(0, 2)) { case 0 -> from(availabeIndices); - case 1 -> showFunctions(); + case 1 -> metaFunctions(); default -> row(); }; @@ -361,8 +361,8 @@ private static String from(List availabeIndices) { return 
result.toString(); } - private static String showFunctions() { - return "show functions"; + private static String metaFunctions() { + return "metadata functions"; } private static String indexPattern(String indexName) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec new file mode 100644 index 0000000000000..974ea8d72b73a --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -0,0 +1,223 @@ +# TODO: switch this test to ``&format=csv&delimiter=|` output +metaFunctions#[skip:-8.13.99] +meta functions; + + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." | false | false | false +asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false +atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." 
| false | false | false +atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false +auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false +avg |"double avg(field:double|integer|long)" |field |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false +ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." 
| false | false | false +cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false +concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false +cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false +cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." 
| true | false | true +count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true +date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false +date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +e |double e() | null | null | null |double | "Euler’s number." | null | false | false +ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false +floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false +greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." 
| [false, false] | true | false +least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false +left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false +length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false +log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." | [true, false] | false | false +log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false +ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false +max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true +median |"double|integer|long median(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." 
| false | false | true +median_absolute_deviation|"double|integer|long median_absolute_deviation(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long min(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true +mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false +mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false +mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." 
| false | false | false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false +mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." 
| false | false | false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false +mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[v, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." | [false, false, true] | false | false +mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false +mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." 
| false | false | false +mv_zip |"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" |[mvLeft, mvRight, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false +now |date now() | null |null | null |date | "Returns current date and time." | null | false | false +percentile |"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false +pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false +replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false +right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false +round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. 
Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false +rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false +sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false +sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false +sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false +st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true +st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false +st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." 
| false | false | false +starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false +substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false +sum |"long sum(field:double|integer|long)" |field |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true +tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false +tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false +to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." 
|false |false | false +to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" |v |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false +to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false +to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_geoshape |"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" |v |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false +to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." 
|false |false | false +to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false +to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false +to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false +to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false +to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." 
|false |false | false +to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false +to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." 
| false | false | false +; + + +metaFunctionsSynopsis#[skip:-8.13.99] +meta functions | keep synopsis; + +synopsis:keyword +"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" +"double acos(n:double|integer|long|unsigned_long)" +"double asin(n:double|integer|long|unsigned_long)" +"double atan(n:double|integer|long|unsigned_long)" +"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" +"double avg(field:double|integer|long)" +"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" +boolean cidr_match(ip:ip, blockX...:keyword) +"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" +"keyword concat(first:keyword|text, rest...:keyword|text)" +"double cos(n:double|integer|long|unsigned_long)" +"double cosh(n:double|integer|long|unsigned_long)" +"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" +"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" +long date_extract(date_part:keyword, field:date) +keyword date_format(?format:keyword, date:date) +"date date_parse(?datePattern:keyword, dateString:keyword|text)" +"date date_trunc(interval:keyword, date:date)" +double e() +"boolean ends_with(str:keyword|text, suffix:keyword|text)" +"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" 
+"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +"keyword left(str:keyword|text, length:integer)" +"integer length(str:keyword|text)" +"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" +"double log10(n:double|integer|long|unsigned_long)" +"keyword|text ltrim(str:keyword|text)" +"double|integer|long max(field:double|integer|long)" +"double|integer|long median(field:double|integer|long)" +"double|integer|long median_absolute_deviation(field:double|integer|long)" +"double|integer|long min(field:double|integer|long)" +"double mv_avg(field:double|integer|long|unsigned_long)" +"keyword mv_concat(v:text|keyword, delim:text|keyword)" +"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long 
mv_median(v:double|integer|long|unsigned_long)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" +"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" +"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" +date now() +"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" +double pi() +"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" +"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" +"keyword right(str:keyword|text, length:integer)" +"double round(value:double, ?decimals:integer)" +"keyword|text rtrim(str:keyword|text)" +"double sin(n:double|integer|long|unsigned_long)" +"double sinh(n:double|integer|long|unsigned_long)" +"keyword split(str:keyword|text, delim:keyword|text)" +"double sqrt(n:double|integer|long|unsigned_long)" +"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"double st_x(point:geo_point|cartesian_point)" +"double st_y(point:geo_point|cartesian_point)" +"boolean starts_with(str:keyword|text, prefix:keyword|text)" +"keyword substring(str:keyword|text, start:integer, ?length:integer)" +"long sum(field:double|integer|long)" +"double tan(n:double|integer|long|unsigned_long)" +"double tanh(n:double|integer|long|unsigned_long)" +double tau() +"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" +"boolean 
to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" +"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" +"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" +"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" +"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_degrees(v:double|integer|long|unsigned_long)" +"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" +"geo_point to_geopoint(v:geo_point|keyword|text)" +"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" +"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"ip to_ip(v:ip|keyword|text)" +"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_lower(str:keyword|text)" +"double to_radians(v:double|integer|long|unsigned_long)" +"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"keyword|text to_upper(str:keyword|text)" +"version to_ver(v:keyword|text|version)" +"version to_version(v:keyword|text|version)" +"keyword|text trim(str:keyword|text)" +; + + +metaFunctionsFiltered#[skip:-8.13.99] +META FUNCTIONS +| WHERE STARTS_WITH(name, "sin") +; + + name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | 
argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false +sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false +; + + +// see https://github.com/elastic/elasticsearch/issues/102120 +countFunctions#[skip:-8.13.99] +meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; + +a:long | b:long | c:long +95 | 95 | 95 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 933c106c1a85b..e7981ea209ccf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -4,231 +4,3 @@ show info | stats v = count(version); v:long 1 ; - -# TODO: switch this test to ``&format=csv&delimiter=|` output -showFunctions#[skip:-8.13.99] -show functions; - - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." 
| false | false | false -asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false -atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." | false | false | false -atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false -auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false -avg |"double avg(field:double|integer|long)" |field |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true -case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. 
The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false -ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false -coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false -concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false -cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false -cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false -count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." 
|long | "Returns the total number (count) of input values." | true | false | true -count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true -date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false -e |double e() | null | null | null |double | "Euler’s number." | null | false | false -ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false -floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false -greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." 
| [false, false] | true | false -least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false -left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false -length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false -log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." | [true, false] | false | false -log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false -ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false -max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true -median |"double|integer|long median(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." 
| false | false | true -median_absolute_deviation|"double|integer|long median_absolute_deviation(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true -min |"double|integer|long min(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true -mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false -mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false -mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false -mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." 
| false | false | false -mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false -mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false -mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false -mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." 
| false | false | false -mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false -mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[v, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." | [false, false, true] | false | false -mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false -mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." 
| false | false | false -mv_zip |"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" |[mvLeft, mvRight, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false -now |date now() | null |null | null |date | "Returns current date and time." | null | false | false -percentile |"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true -pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false -pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false -replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false -right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false -round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. 
Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." | [false, true] | false | false -rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false -sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false -split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false -sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false -st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true -st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false -st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." 
| false | false | false -starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false -substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false -sum |"long sum(field:double|integer|long)" |field |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false -tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false -tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false -to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." 
|false |false | false -to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" |v |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false -to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false -to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false -to_geoshape |"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" |v |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false -to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." 
|false |false | false -to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false -to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false -to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false -to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." |false |false | false -to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." 
|false |false | false -to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false -to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." 
| false | false | false -; - - -showFunctionsSynopsis#[skip:-8.13.99] -show functions | keep synopsis; - -synopsis:keyword -"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" -"double acos(n:double|integer|long|unsigned_long)" -"double asin(n:double|integer|long|unsigned_long)" -"double atan(n:double|integer|long|unsigned_long)" -"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" -"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" -"double avg(field:double|integer|long)" -"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" -boolean cidr_match(ip:ip, blockX...:keyword) -"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" -"keyword concat(first:keyword|text, rest...:keyword|text)" -"double cos(n:double|integer|long|unsigned_long)" -"double cosh(n:double|integer|long|unsigned_long)" -"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" -"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -long date_extract(date_part:keyword, field:date) -keyword date_format(?format:keyword, date:date) -"date date_parse(?datePattern:keyword, dateString:keyword|text)" -"date date_trunc(interval:keyword, date:date)" -double e() -"boolean ends_with(str:keyword|text, suffix:keyword|text)" -"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" 
-"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"keyword left(str:keyword|text, length:integer)" -"integer length(str:keyword|text)" -"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" -"double log10(n:double|integer|long|unsigned_long)" -"keyword|text ltrim(str:keyword|text)" -"double|integer|long max(field:double|integer|long)" -"double|integer|long median(field:double|integer|long)" -"double|integer|long median_absolute_deviation(field:double|integer|long)" -"double|integer|long min(field:double|integer|long)" -"double mv_avg(field:double|integer|long|unsigned_long)" -"keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long 
mv_median(v:double|integer|long|unsigned_long)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" -"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" -"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" -date now() -"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" -double pi() -"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" -"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" -"keyword right(str:keyword|text, length:integer)" -"double round(value:double, ?decimals:integer)" -"keyword|text rtrim(str:keyword|text)" -"double sin(n:double|integer|long|unsigned_long)" -"double sinh(n:double|integer|long|unsigned_long)" -"keyword split(str:keyword|text, delim:keyword|text)" -"double sqrt(n:double|integer|long|unsigned_long)" -"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" -"double st_x(point:geo_point|cartesian_point)" -"double st_y(point:geo_point|cartesian_point)" -"boolean starts_with(str:keyword|text, prefix:keyword|text)" -"keyword substring(str:keyword|text, start:integer, ?length:integer)" -"long sum(field:double|integer|long)" -"double tan(n:double|integer|long|unsigned_long)" -"double tanh(n:double|integer|long|unsigned_long)" -double tau() -"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"boolean 
to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" -"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" -"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" -"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"double to_degrees(v:double|integer|long|unsigned_long)" -"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(v:geo_point|keyword|text)" -"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" -"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"ip to_ip(v:ip|keyword|text)" -"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"keyword|text to_lower(str:keyword|text)" -"double to_radians(v:double|integer|long|unsigned_long)" -"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"keyword|text to_upper(str:keyword|text)" -"version to_ver(v:keyword|text|version)" -"version to_version(v:keyword|text|version)" -"keyword|text trim(str:keyword|text)" -; - - -showFunctionsFiltered#[skip:-8.12.99] -// tag::showFunctionsFiltered[] -SHOW functions -| WHERE STARTS_WITH(name, "sin") -// end::showFunctionsFiltered[] -; - -// 
tag::showFunctionsFiltered-result[] - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false -sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false -// end::showFunctionsFiltered-result[] -; - - -// see https://github.com/elastic/elasticsearch/issues/102120 -countFunctions#[skip:-8.13.99] -show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; - -a:long | b:long | c:long -95 | 95 | 95 -; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 115ae54833e14..17082e9855761 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -1038,8 +1038,8 @@ public void testShowInfo() { } } - public void testShowFunctions() { - try (EsqlQueryResponse results = run("show functions")) { + public void testMetaFunctions() { + try (EsqlQueryResponse results = run("meta functions")) { assertThat( results.columns(), equalTo( diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index ee2d449b21184..bc21a60a76ed8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -10,6 +10,7 @@ GROK : 
'grok' -> pushMode(EXPRESSION_MODE); INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); +META : 'meta' -> pushMode(META_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -364,13 +365,12 @@ MVEXPAND_WS ; // -// SHOW INFO +// SHOW commands // mode SHOW_MODE; SHOW_PIPE : PIPE -> type(PIPE), popMode; INFO : 'info'; -FUNCTIONS : 'functions'; SHOW_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) @@ -384,6 +384,26 @@ SHOW_WS : WS -> channel(HIDDEN) ; +// +// META commands +// +mode META_MODE; +META_PIPE : PIPE -> type(PIPE), popMode; + +FUNCTIONS : 'functions'; + +META_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +META_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +META_WS + : WS -> channel(HIDDEN) + ; + mode SETTING_MODE; SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 4bf3584737d1d..5edc646fad10e 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -8,100 +8,104 @@ GROK=7 INLINESTATS=8 KEEP=9 LIMIT=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -UNKNOWN_CMD=18 -LINE_COMMENT=19 -MULTILINE_COMMENT=20 -WS=21 -EXPLAIN_WS=22 -EXPLAIN_LINE_COMMENT=23 -EXPLAIN_MULTILINE_COMMENT=24 -PIPE=25 -STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -LAST=38 -LP=39 -IN=40 -IS=41 -LIKE=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -OPENING_BRACKET=63 -CLOSING_BRACKET=64 -UNQUOTED_IDENTIFIER=65 
-QUOTED_IDENTIFIER=66 -EXPR_LINE_COMMENT=67 -EXPR_MULTILINE_COMMENT=68 -EXPR_WS=69 -METADATA=70 -FROM_UNQUOTED_IDENTIFIER=71 -FROM_LINE_COMMENT=72 -FROM_MULTILINE_COMMENT=73 -FROM_WS=74 -ID_PATTERN=75 -PROJECT_LINE_COMMENT=76 -PROJECT_MULTILINE_COMMENT=77 -PROJECT_WS=78 -AS=79 -RENAME_LINE_COMMENT=80 -RENAME_MULTILINE_COMMENT=81 -RENAME_WS=82 -ON=83 -WITH=84 -ENRICH_POLICY_NAME=85 -ENRICH_LINE_COMMENT=86 -ENRICH_MULTILINE_COMMENT=87 -ENRICH_WS=88 -ENRICH_FIELD_LINE_COMMENT=89 -ENRICH_FIELD_MULTILINE_COMMENT=90 -ENRICH_FIELD_WS=91 -MVEXPAND_LINE_COMMENT=92 -MVEXPAND_MULTILINE_COMMENT=93 -MVEXPAND_WS=94 -INFO=95 -FUNCTIONS=96 +META=11 +MV_EXPAND=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 +UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 SHOW_LINE_COMMENT=97 SHOW_MULTILINE_COMMENT=98 SHOW_WS=99 -COLON=100 -SETTING=101 -SETTING_LINE_COMMENT=102 
-SETTTING_MULTILINE_COMMENT=103 -SETTING_WS=104 +FUNCTIONS=100 +META_LINE_COMMENT=101 +META_MULTILINE_COMMENT=102 +META_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 'dissect'=1 'drop'=2 'enrich'=3 @@ -112,53 +116,54 @@ SETTING_WS=104 'inlinestats'=8 'keep'=9 'limit'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -','=33 -'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'last'=38 -'('=39 -'in'=40 -'is'=41 -'like'=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -']'=64 -'metadata'=70 -'as'=79 -'on'=83 -'with'=84 -'info'=95 -'functions'=96 -':'=100 +'meta'=11 +'mv_expand'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index b34b9bb103b83..a7d0097b6aec8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -24,6 +24,7 @@ sourceCommand | fromCommand | rowCommand | showCommand + | metaCommand ; processingCommand @@ -244,7 +245,10 @@ subqueryExpression showCommand : SHOW INFO #showInfo - | SHOW FUNCTIONS #showFunctions + ; + +metaCommand + : META FUNCTIONS #metaFunctions ; enrichCommand diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens 
b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 4bf3584737d1d..5edc646fad10e 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -8,100 +8,104 @@ GROK=7 INLINESTATS=8 KEEP=9 LIMIT=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -UNKNOWN_CMD=18 -LINE_COMMENT=19 -MULTILINE_COMMENT=20 -WS=21 -EXPLAIN_WS=22 -EXPLAIN_LINE_COMMENT=23 -EXPLAIN_MULTILINE_COMMENT=24 -PIPE=25 -STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -COMMA=33 -DESC=34 -DOT=35 -FALSE=36 -FIRST=37 -LAST=38 -LP=39 -IN=40 -IS=41 -LIKE=42 -NOT=43 -NULL=44 -NULLS=45 -OR=46 -PARAM=47 -RLIKE=48 -RP=49 -TRUE=50 -EQ=51 -CIEQ=52 -NEQ=53 -LT=54 -LTE=55 -GT=56 -GTE=57 -PLUS=58 -MINUS=59 -ASTERISK=60 -SLASH=61 -PERCENT=62 -OPENING_BRACKET=63 -CLOSING_BRACKET=64 -UNQUOTED_IDENTIFIER=65 -QUOTED_IDENTIFIER=66 -EXPR_LINE_COMMENT=67 -EXPR_MULTILINE_COMMENT=68 -EXPR_WS=69 -METADATA=70 -FROM_UNQUOTED_IDENTIFIER=71 -FROM_LINE_COMMENT=72 -FROM_MULTILINE_COMMENT=73 -FROM_WS=74 -ID_PATTERN=75 -PROJECT_LINE_COMMENT=76 -PROJECT_MULTILINE_COMMENT=77 -PROJECT_WS=78 -AS=79 -RENAME_LINE_COMMENT=80 -RENAME_MULTILINE_COMMENT=81 -RENAME_WS=82 -ON=83 -WITH=84 -ENRICH_POLICY_NAME=85 -ENRICH_LINE_COMMENT=86 -ENRICH_MULTILINE_COMMENT=87 -ENRICH_WS=88 -ENRICH_FIELD_LINE_COMMENT=89 -ENRICH_FIELD_MULTILINE_COMMENT=90 -ENRICH_FIELD_WS=91 -MVEXPAND_LINE_COMMENT=92 -MVEXPAND_MULTILINE_COMMENT=93 -MVEXPAND_WS=94 -INFO=95 -FUNCTIONS=96 +META=11 +MV_EXPAND=12 +RENAME=13 +ROW=14 +SHOW=15 +SORT=16 +STATS=17 +WHERE=18 +UNKNOWN_CMD=19 +LINE_COMMENT=20 +MULTILINE_COMMENT=21 +WS=22 +EXPLAIN_WS=23 +EXPLAIN_LINE_COMMENT=24 +EXPLAIN_MULTILINE_COMMENT=25 +PIPE=26 +STRING=27 +INTEGER_LITERAL=28 +DECIMAL_LITERAL=29 +BY=30 +AND=31 +ASC=32 +ASSIGN=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +LAST=39 +LP=40 +IN=41 +IS=42 +LIKE=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 
+EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +OPENING_BRACKET=64 +CLOSING_BRACKET=65 +UNQUOTED_IDENTIFIER=66 +QUOTED_IDENTIFIER=67 +EXPR_LINE_COMMENT=68 +EXPR_MULTILINE_COMMENT=69 +EXPR_WS=70 +METADATA=71 +FROM_UNQUOTED_IDENTIFIER=72 +FROM_LINE_COMMENT=73 +FROM_MULTILINE_COMMENT=74 +FROM_WS=75 +ID_PATTERN=76 +PROJECT_LINE_COMMENT=77 +PROJECT_MULTILINE_COMMENT=78 +PROJECT_WS=79 +AS=80 +RENAME_LINE_COMMENT=81 +RENAME_MULTILINE_COMMENT=82 +RENAME_WS=83 +ON=84 +WITH=85 +ENRICH_POLICY_NAME=86 +ENRICH_LINE_COMMENT=87 +ENRICH_MULTILINE_COMMENT=88 +ENRICH_WS=89 +ENRICH_FIELD_LINE_COMMENT=90 +ENRICH_FIELD_MULTILINE_COMMENT=91 +ENRICH_FIELD_WS=92 +MVEXPAND_LINE_COMMENT=93 +MVEXPAND_MULTILINE_COMMENT=94 +MVEXPAND_WS=95 +INFO=96 SHOW_LINE_COMMENT=97 SHOW_MULTILINE_COMMENT=98 SHOW_WS=99 -COLON=100 -SETTING=101 -SETTING_LINE_COMMENT=102 -SETTTING_MULTILINE_COMMENT=103 -SETTING_WS=104 +FUNCTIONS=100 +META_LINE_COMMENT=101 +META_MULTILINE_COMMENT=102 +META_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 'dissect'=1 'drop'=2 'enrich'=3 @@ -112,53 +116,54 @@ SETTING_WS=104 'inlinestats'=8 'keep'=9 'limit'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -','=33 -'desc'=34 -'.'=35 -'false'=36 -'first'=37 -'last'=38 -'('=39 -'in'=40 -'is'=41 -'like'=42 -'not'=43 -'null'=44 -'nulls'=45 -'or'=46 -'?'=47 -'rlike'=48 -')'=49 -'true'=50 -'=='=51 -'=~'=52 -'!='=53 -'<'=54 -'<='=55 -'>'=56 -'>='=57 -'+'=58 -'-'=59 -'*'=60 -'/'=61 -'%'=62 -']'=64 -'metadata'=70 -'as'=79 -'on'=83 -'with'=84 -'info'=95 -'functions'=96 -':'=100 +'meta'=11 +'mv_expand'=12 +'rename'=13 +'row'=14 +'show'=15 +'sort'=16 +'stats'=17 +'where'=18 +'|'=26 +'by'=30 +'and'=31 +'asc'=32 +'='=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'last'=39 +'('=40 +'in'=41 +'is'=42 +'like'=43 +'not'=44 +'null'=45 +'nulls'=46 
+'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=65 +'metadata'=71 +'as'=80 +'on'=84 +'with'=85 +'info'=96 +'functions'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index fac0121bd05b4..b4d69cc96bffd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -94,7 +94,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.ql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.session.Configuration; @@ -243,7 +243,7 @@ public record FunctionDescription( ) { public String fullSignature() { StringBuilder builder = new StringBuilder(); - builder.append(ShowFunctions.withPipes(returnType)); + builder.append(MetaFunctions.withPipes(returnType)); builder.append(" "); builder.append(name); builder.append("("); @@ -260,7 +260,7 @@ public String fullSignature() { builder.append("..."); } builder.append(":"); - builder.append(ShowFunctions.withPipes(arg.type())); + builder.append(MetaFunctions.withPipes(arg.type())); } builder.append(")"); return builder.toString(); diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index c0caaf8b180ce..f30425158b1b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -73,7 +73,7 @@ * *

  • * Add your function to {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}. - * This links it into the language and {@code SHOW FUNCTIONS}. Also add your function to + * This links it into the language and {@code META FUNCTIONS}. Also add your function to * {@link org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes}. This makes your function * serializable over the wire. Mostly you can copy existing implementations for both. *
  • diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 20d06df68b12b..ed47b27924a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -10,6 +10,7 @@ null 'inlinestats' 'keep' 'limit' +'meta' 'mv_expand' 'rename' 'row' @@ -95,6 +96,9 @@ null null null 'info' +null +null +null 'functions' null null @@ -117,6 +121,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -202,10 +207,13 @@ MVEXPAND_LINE_COMMENT MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +FUNCTIONS +META_LINE_COMMENT +META_MULTILINE_COMMENT +META_WS COLON SETTING SETTING_LINE_COMMENT @@ -223,6 +231,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -355,10 +364,14 @@ MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS SHOW_PIPE INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +META_PIPE +FUNCTIONS +META_LINE_COMMENT +META_MULTILINE_COMMENT +META_WS SETTING_CLOSING_BRACKET COLON SETTING @@ -381,7 +394,8 @@ ENRICH_MODE ENRICH_FIELD_MODE MVEXPAND_MODE SHOW_MODE +META_MODE SETTING_MODE atn: -[4, 0, 104, 1147, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 
41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 
7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 4, 17, 460, 8, 17, 11, 17, 12, 17, 461, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 470, 8, 18, 10, 18, 12, 18, 473, 9, 18, 1, 18, 3, 18, 476, 8, 18, 1, 18, 3, 18, 479, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 488, 8, 19, 10, 19, 12, 19, 491, 9, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 499, 8, 20, 11, 20, 12, 20, 500, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 542, 8, 31, 1, 31, 4, 31, 545, 8, 31, 11, 31, 12, 31, 546, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 556, 8, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 563, 8, 36, 1, 37, 1, 37, 1, 37, 5, 37, 568, 8, 37, 10, 37, 12, 37, 571, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 579, 8, 37, 10, 37, 12, 37, 582, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 589, 8, 37, 1, 37, 3, 37, 592, 8, 37, 3, 37, 594, 8, 37, 1, 38, 4, 38, 597, 8, 38, 11, 38, 12, 38, 598, 1, 39, 4, 39, 602, 8, 39, 11, 39, 12, 39, 603, 1, 39, 1, 39, 5, 39, 608, 8, 39, 10, 39, 12, 39, 611, 9, 39, 1, 39, 1, 39, 4, 39, 615, 8, 39, 11, 39, 12, 39, 616, 1, 39, 4, 39, 620, 8, 39, 11, 39, 12, 39, 621, 1, 39, 1, 39, 5, 39, 626, 8, 39, 10, 39, 12, 39, 629, 9, 39, 3, 39, 631, 8, 39, 1, 39, 1, 
39, 1, 39, 1, 39, 4, 39, 637, 8, 39, 11, 39, 12, 39, 638, 1, 39, 1, 39, 3, 39, 643, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 771, 8, 76, 10, 76, 12, 76, 774, 9, 76, 1, 76, 1, 76, 3, 76, 778, 8, 76, 1, 76, 4, 76, 781, 8, 76, 11, 76, 12, 76, 782, 3, 76, 785, 8, 76, 1, 77, 1, 77, 4, 77, 789, 8, 77, 11, 77, 12, 77, 790, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 3, 88, 842, 8, 88, 1, 89, 4, 89, 845, 8, 89, 11, 89, 12, 89, 846, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 3, 97, 882, 8, 97, 1, 98, 1, 98, 3, 98, 886, 8, 98, 1, 98, 5, 98, 889, 8, 98, 10, 98, 12, 98, 892, 9, 98, 1, 98, 1, 98, 3, 98, 896, 8, 98, 1, 98, 4, 98, 899, 8, 98, 11, 98, 12, 98, 900, 
3, 98, 903, 8, 98, 1, 99, 1, 99, 4, 99, 907, 8, 99, 11, 99, 12, 99, 908, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 117, 4, 117, 984, 8, 117, 11, 117, 12, 117, 985, 1, 117, 1, 117, 3, 117, 990, 8, 117, 1, 117, 4, 117, 993, 8, 117, 11, 117, 12, 117, 994, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 4, 148, 1132, 8, 148, 11, 148, 12, 148, 1133, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 
151, 1, 151, 2, 489, 580, 0, 152, 11, 1, 13, 2, 15, 3, 17, 4, 19, 5, 21, 6, 23, 7, 25, 8, 27, 9, 29, 10, 31, 11, 33, 12, 35, 13, 37, 14, 39, 15, 41, 16, 43, 17, 45, 18, 47, 19, 49, 20, 51, 21, 53, 0, 55, 0, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 0, 167, 66, 169, 67, 171, 68, 173, 69, 175, 0, 177, 0, 179, 0, 181, 0, 183, 0, 185, 70, 187, 0, 189, 71, 191, 0, 193, 72, 195, 73, 197, 74, 199, 0, 201, 0, 203, 0, 205, 0, 207, 0, 209, 75, 211, 76, 213, 77, 215, 78, 217, 0, 219, 0, 221, 0, 223, 0, 225, 79, 227, 0, 229, 80, 231, 81, 233, 82, 235, 0, 237, 0, 239, 83, 241, 84, 243, 0, 245, 85, 247, 0, 249, 0, 251, 86, 253, 87, 255, 88, 257, 0, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 0, 271, 89, 273, 90, 275, 91, 277, 0, 279, 0, 281, 0, 283, 0, 285, 92, 287, 93, 289, 94, 291, 0, 293, 95, 295, 96, 297, 97, 299, 98, 301, 99, 303, 0, 305, 100, 307, 101, 309, 102, 311, 103, 313, 104, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1175, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 
0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 1, 53, 1, 0, 0, 0, 1, 55, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 1, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 2, 63, 1, 0, 0, 0, 2, 85, 1, 0, 0, 0, 2, 87, 1, 0, 0, 0, 2, 89, 1, 0, 0, 0, 2, 91, 1, 0, 0, 0, 2, 93, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 2, 99, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 2, 107, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 2, 115, 1, 0, 0, 0, 2, 117, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 2, 125, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 2, 129, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 2, 133, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 2, 139, 1, 0, 0, 0, 2, 141, 1, 0, 0, 0, 2, 143, 1, 0, 0, 0, 2, 145, 1, 0, 0, 0, 2, 147, 1, 0, 0, 0, 2, 149, 1, 0, 0, 0, 2, 151, 1, 0, 0, 0, 2, 153, 1, 0, 0, 0, 2, 155, 1, 0, 0, 0, 2, 157, 1, 0, 0, 0, 2, 159, 1, 0, 0, 0, 2, 161, 1, 0, 0, 0, 2, 163, 1, 0, 0, 0, 2, 167, 1, 0, 0, 0, 2, 169, 1, 0, 0, 0, 2, 171, 1, 0, 0, 0, 2, 173, 1, 0, 0, 0, 3, 175, 1, 0, 0, 0, 3, 177, 1, 0, 0, 0, 3, 179, 1, 0, 0, 0, 3, 181, 1, 0, 0, 0, 3, 183, 1, 0, 0, 0, 3, 185, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 4, 199, 1, 0, 0, 0, 4, 201, 1, 0, 0, 0, 4, 203, 1, 0, 0, 0, 4, 209, 1, 0, 0, 0, 4, 211, 1, 0, 0, 0, 4, 213, 1, 0, 0, 0, 4, 215, 1, 0, 0, 0, 5, 217, 1, 0, 0, 0, 5, 219, 1, 0, 0, 0, 5, 221, 1, 0, 0, 0, 5, 223, 1, 0, 0, 0, 5, 225, 1, 0, 0, 0, 5, 227, 1, 0, 0, 0, 5, 229, 1, 0, 0, 0, 5, 231, 1, 0, 0, 0, 5, 233, 1, 0, 0, 0, 6, 235, 1, 0, 0, 0, 6, 237, 1, 0, 0, 0, 6, 239, 1, 0, 0, 0, 6, 241, 1, 0, 0, 0, 6, 245, 1, 0, 0, 0, 6, 247, 1, 0, 0, 0, 6, 249, 1, 0, 0, 0, 6, 251, 1, 0, 0, 0, 6, 253, 1, 0, 0, 0, 6, 255, 1, 0, 0, 0, 7, 257, 1, 0, 0, 0, 7, 
259, 1, 0, 0, 0, 7, 261, 1, 0, 0, 0, 7, 263, 1, 0, 0, 0, 7, 265, 1, 0, 0, 0, 7, 267, 1, 0, 0, 0, 7, 269, 1, 0, 0, 0, 7, 271, 1, 0, 0, 0, 7, 273, 1, 0, 0, 0, 7, 275, 1, 0, 0, 0, 8, 277, 1, 0, 0, 0, 8, 279, 1, 0, 0, 0, 8, 281, 1, 0, 0, 0, 8, 283, 1, 0, 0, 0, 8, 285, 1, 0, 0, 0, 8, 287, 1, 0, 0, 0, 8, 289, 1, 0, 0, 0, 9, 291, 1, 0, 0, 0, 9, 293, 1, 0, 0, 0, 9, 295, 1, 0, 0, 0, 9, 297, 1, 0, 0, 0, 9, 299, 1, 0, 0, 0, 9, 301, 1, 0, 0, 0, 10, 303, 1, 0, 0, 0, 10, 305, 1, 0, 0, 0, 10, 307, 1, 0, 0, 0, 10, 309, 1, 0, 0, 0, 10, 311, 1, 0, 0, 0, 10, 313, 1, 0, 0, 0, 11, 315, 1, 0, 0, 0, 13, 325, 1, 0, 0, 0, 15, 332, 1, 0, 0, 0, 17, 341, 1, 0, 0, 0, 19, 348, 1, 0, 0, 0, 21, 358, 1, 0, 0, 0, 23, 365, 1, 0, 0, 0, 25, 372, 1, 0, 0, 0, 27, 386, 1, 0, 0, 0, 29, 393, 1, 0, 0, 0, 31, 401, 1, 0, 0, 0, 33, 413, 1, 0, 0, 0, 35, 422, 1, 0, 0, 0, 37, 428, 1, 0, 0, 0, 39, 435, 1, 0, 0, 0, 41, 442, 1, 0, 0, 0, 43, 450, 1, 0, 0, 0, 45, 459, 1, 0, 0, 0, 47, 465, 1, 0, 0, 0, 49, 482, 1, 0, 0, 0, 51, 498, 1, 0, 0, 0, 53, 504, 1, 0, 0, 0, 55, 509, 1, 0, 0, 0, 57, 514, 1, 0, 0, 0, 59, 518, 1, 0, 0, 0, 61, 522, 1, 0, 0, 0, 63, 526, 1, 0, 0, 0, 65, 530, 1, 0, 0, 0, 67, 532, 1, 0, 0, 0, 69, 534, 1, 0, 0, 0, 71, 537, 1, 0, 0, 0, 73, 539, 1, 0, 0, 0, 75, 548, 1, 0, 0, 0, 77, 550, 1, 0, 0, 0, 79, 555, 1, 0, 0, 0, 81, 557, 1, 0, 0, 0, 83, 562, 1, 0, 0, 0, 85, 593, 1, 0, 0, 0, 87, 596, 1, 0, 0, 0, 89, 642, 1, 0, 0, 0, 91, 644, 1, 0, 0, 0, 93, 647, 1, 0, 0, 0, 95, 651, 1, 0, 0, 0, 97, 655, 1, 0, 0, 0, 99, 657, 1, 0, 0, 0, 101, 659, 1, 0, 0, 0, 103, 664, 1, 0, 0, 0, 105, 666, 1, 0, 0, 0, 107, 672, 1, 0, 0, 0, 109, 678, 1, 0, 0, 0, 111, 683, 1, 0, 0, 0, 113, 685, 1, 0, 0, 0, 115, 688, 1, 0, 0, 0, 117, 691, 1, 0, 0, 0, 119, 696, 1, 0, 0, 0, 121, 700, 1, 0, 0, 0, 123, 705, 1, 0, 0, 0, 125, 711, 1, 0, 0, 0, 127, 714, 1, 0, 0, 0, 129, 716, 1, 0, 0, 0, 131, 722, 1, 0, 0, 0, 133, 724, 1, 0, 0, 0, 135, 729, 1, 0, 0, 0, 137, 732, 1, 0, 0, 0, 139, 735, 1, 0, 0, 0, 141, 738, 1, 0, 0, 0, 143, 740, 1, 0, 0, 0, 145, 
743, 1, 0, 0, 0, 147, 745, 1, 0, 0, 0, 149, 748, 1, 0, 0, 0, 151, 750, 1, 0, 0, 0, 153, 752, 1, 0, 0, 0, 155, 754, 1, 0, 0, 0, 157, 756, 1, 0, 0, 0, 159, 758, 1, 0, 0, 0, 161, 763, 1, 0, 0, 0, 163, 784, 1, 0, 0, 0, 165, 786, 1, 0, 0, 0, 167, 794, 1, 0, 0, 0, 169, 796, 1, 0, 0, 0, 171, 800, 1, 0, 0, 0, 173, 804, 1, 0, 0, 0, 175, 808, 1, 0, 0, 0, 177, 813, 1, 0, 0, 0, 179, 817, 1, 0, 0, 0, 181, 821, 1, 0, 0, 0, 183, 825, 1, 0, 0, 0, 185, 829, 1, 0, 0, 0, 187, 841, 1, 0, 0, 0, 189, 844, 1, 0, 0, 0, 191, 848, 1, 0, 0, 0, 193, 852, 1, 0, 0, 0, 195, 856, 1, 0, 0, 0, 197, 860, 1, 0, 0, 0, 199, 864, 1, 0, 0, 0, 201, 869, 1, 0, 0, 0, 203, 873, 1, 0, 0, 0, 205, 881, 1, 0, 0, 0, 207, 902, 1, 0, 0, 0, 209, 906, 1, 0, 0, 0, 211, 910, 1, 0, 0, 0, 213, 914, 1, 0, 0, 0, 215, 918, 1, 0, 0, 0, 217, 922, 1, 0, 0, 0, 219, 927, 1, 0, 0, 0, 221, 931, 1, 0, 0, 0, 223, 935, 1, 0, 0, 0, 225, 939, 1, 0, 0, 0, 227, 942, 1, 0, 0, 0, 229, 946, 1, 0, 0, 0, 231, 950, 1, 0, 0, 0, 233, 954, 1, 0, 0, 0, 235, 958, 1, 0, 0, 0, 237, 963, 1, 0, 0, 0, 239, 968, 1, 0, 0, 0, 241, 973, 1, 0, 0, 0, 243, 980, 1, 0, 0, 0, 245, 989, 1, 0, 0, 0, 247, 996, 1, 0, 0, 0, 249, 1000, 1, 0, 0, 0, 251, 1004, 1, 0, 0, 0, 253, 1008, 1, 0, 0, 0, 255, 1012, 1, 0, 0, 0, 257, 1016, 1, 0, 0, 0, 259, 1022, 1, 0, 0, 0, 261, 1026, 1, 0, 0, 0, 263, 1030, 1, 0, 0, 0, 265, 1034, 1, 0, 0, 0, 267, 1038, 1, 0, 0, 0, 269, 1042, 1, 0, 0, 0, 271, 1046, 1, 0, 0, 0, 273, 1050, 1, 0, 0, 0, 275, 1054, 1, 0, 0, 0, 277, 1058, 1, 0, 0, 0, 279, 1063, 1, 0, 0, 0, 281, 1067, 1, 0, 0, 0, 283, 1071, 1, 0, 0, 0, 285, 1075, 1, 0, 0, 0, 287, 1079, 1, 0, 0, 0, 289, 1083, 1, 0, 0, 0, 291, 1087, 1, 0, 0, 0, 293, 1092, 1, 0, 0, 0, 295, 1097, 1, 0, 0, 0, 297, 1107, 1, 0, 0, 0, 299, 1111, 1, 0, 0, 0, 301, 1115, 1, 0, 0, 0, 303, 1119, 1, 0, 0, 0, 305, 1124, 1, 0, 0, 0, 307, 1131, 1, 0, 0, 0, 309, 1135, 1, 0, 0, 0, 311, 1139, 1, 0, 0, 0, 313, 1143, 1, 0, 0, 0, 315, 316, 5, 100, 0, 0, 316, 317, 5, 105, 0, 0, 317, 318, 5, 115, 0, 0, 318, 319, 5, 115, 0, 0, 319, 
320, 5, 101, 0, 0, 320, 321, 5, 99, 0, 0, 321, 322, 5, 116, 0, 0, 322, 323, 1, 0, 0, 0, 323, 324, 6, 0, 0, 0, 324, 12, 1, 0, 0, 0, 325, 326, 5, 100, 0, 0, 326, 327, 5, 114, 0, 0, 327, 328, 5, 111, 0, 0, 328, 329, 5, 112, 0, 0, 329, 330, 1, 0, 0, 0, 330, 331, 6, 1, 1, 0, 331, 14, 1, 0, 0, 0, 332, 333, 5, 101, 0, 0, 333, 334, 5, 110, 0, 0, 334, 335, 5, 114, 0, 0, 335, 336, 5, 105, 0, 0, 336, 337, 5, 99, 0, 0, 337, 338, 5, 104, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 6, 2, 2, 0, 340, 16, 1, 0, 0, 0, 341, 342, 5, 101, 0, 0, 342, 343, 5, 118, 0, 0, 343, 344, 5, 97, 0, 0, 344, 345, 5, 108, 0, 0, 345, 346, 1, 0, 0, 0, 346, 347, 6, 3, 0, 0, 347, 18, 1, 0, 0, 0, 348, 349, 5, 101, 0, 0, 349, 350, 5, 120, 0, 0, 350, 351, 5, 112, 0, 0, 351, 352, 5, 108, 0, 0, 352, 353, 5, 97, 0, 0, 353, 354, 5, 105, 0, 0, 354, 355, 5, 110, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 4, 3, 0, 357, 20, 1, 0, 0, 0, 358, 359, 5, 102, 0, 0, 359, 360, 5, 114, 0, 0, 360, 361, 5, 111, 0, 0, 361, 362, 5, 109, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 5, 4, 0, 364, 22, 1, 0, 0, 0, 365, 366, 5, 103, 0, 0, 366, 367, 5, 114, 0, 0, 367, 368, 5, 111, 0, 0, 368, 369, 5, 107, 0, 0, 369, 370, 1, 0, 0, 0, 370, 371, 6, 6, 0, 0, 371, 24, 1, 0, 0, 0, 372, 373, 5, 105, 0, 0, 373, 374, 5, 110, 0, 0, 374, 375, 5, 108, 0, 0, 375, 376, 5, 105, 0, 0, 376, 377, 5, 110, 0, 0, 377, 378, 5, 101, 0, 0, 378, 379, 5, 115, 0, 0, 379, 380, 5, 116, 0, 0, 380, 381, 5, 97, 0, 0, 381, 382, 5, 116, 0, 0, 382, 383, 5, 115, 0, 0, 383, 384, 1, 0, 0, 0, 384, 385, 6, 7, 0, 0, 385, 26, 1, 0, 0, 0, 386, 387, 5, 107, 0, 0, 387, 388, 5, 101, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 112, 0, 0, 390, 391, 1, 0, 0, 0, 391, 392, 6, 8, 1, 0, 392, 28, 1, 0, 0, 0, 393, 394, 5, 108, 0, 0, 394, 395, 5, 105, 0, 0, 395, 396, 5, 109, 0, 0, 396, 397, 5, 105, 0, 0, 397, 398, 5, 116, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 9, 0, 0, 400, 30, 1, 0, 0, 0, 401, 402, 5, 109, 0, 0, 402, 403, 5, 118, 0, 0, 403, 404, 5, 95, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 
120, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 5, 97, 0, 0, 408, 409, 5, 110, 0, 0, 409, 410, 5, 100, 0, 0, 410, 411, 1, 0, 0, 0, 411, 412, 6, 10, 5, 0, 412, 32, 1, 0, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 101, 0, 0, 415, 416, 5, 110, 0, 0, 416, 417, 5, 97, 0, 0, 417, 418, 5, 109, 0, 0, 418, 419, 5, 101, 0, 0, 419, 420, 1, 0, 0, 0, 420, 421, 6, 11, 6, 0, 421, 34, 1, 0, 0, 0, 422, 423, 5, 114, 0, 0, 423, 424, 5, 111, 0, 0, 424, 425, 5, 119, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 12, 0, 0, 427, 36, 1, 0, 0, 0, 428, 429, 5, 115, 0, 0, 429, 430, 5, 104, 0, 0, 430, 431, 5, 111, 0, 0, 431, 432, 5, 119, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 13, 7, 0, 434, 38, 1, 0, 0, 0, 435, 436, 5, 115, 0, 0, 436, 437, 5, 111, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 116, 0, 0, 439, 440, 1, 0, 0, 0, 440, 441, 6, 14, 0, 0, 441, 40, 1, 0, 0, 0, 442, 443, 5, 115, 0, 0, 443, 444, 5, 116, 0, 0, 444, 445, 5, 97, 0, 0, 445, 446, 5, 116, 0, 0, 446, 447, 5, 115, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 15, 0, 0, 449, 42, 1, 0, 0, 0, 450, 451, 5, 119, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 101, 0, 0, 453, 454, 5, 114, 0, 0, 454, 455, 5, 101, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 6, 16, 0, 0, 457, 44, 1, 0, 0, 0, 458, 460, 8, 0, 0, 0, 459, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 459, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 17, 0, 0, 464, 46, 1, 0, 0, 0, 465, 466, 5, 47, 0, 0, 466, 467, 5, 47, 0, 0, 467, 471, 1, 0, 0, 0, 468, 470, 8, 1, 0, 0, 469, 468, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 476, 5, 13, 0, 0, 475, 474, 1, 0, 0, 0, 475, 476, 1, 0, 0, 0, 476, 478, 1, 0, 0, 0, 477, 479, 5, 10, 0, 0, 478, 477, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 18, 8, 0, 481, 48, 1, 0, 0, 0, 482, 483, 5, 47, 0, 0, 483, 484, 5, 42, 0, 0, 484, 489, 1, 0, 0, 0, 485, 488, 3, 49, 19, 0, 486, 488, 9, 0, 0, 0, 487, 485, 1, 0, 0, 0, 487, 486, 1, 0, 0, 0, 488, 
491, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 490, 492, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 493, 5, 42, 0, 0, 493, 494, 5, 47, 0, 0, 494, 495, 1, 0, 0, 0, 495, 496, 6, 19, 8, 0, 496, 50, 1, 0, 0, 0, 497, 499, 7, 2, 0, 0, 498, 497, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 20, 8, 0, 503, 52, 1, 0, 0, 0, 504, 505, 3, 159, 74, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 21, 9, 0, 507, 508, 6, 21, 10, 0, 508, 54, 1, 0, 0, 0, 509, 510, 3, 63, 26, 0, 510, 511, 1, 0, 0, 0, 511, 512, 6, 22, 11, 0, 512, 513, 6, 22, 12, 0, 513, 56, 1, 0, 0, 0, 514, 515, 3, 51, 20, 0, 515, 516, 1, 0, 0, 0, 516, 517, 6, 23, 8, 0, 517, 58, 1, 0, 0, 0, 518, 519, 3, 47, 18, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 24, 8, 0, 521, 60, 1, 0, 0, 0, 522, 523, 3, 49, 19, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 25, 8, 0, 525, 62, 1, 0, 0, 0, 526, 527, 5, 124, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 26, 12, 0, 529, 64, 1, 0, 0, 0, 530, 531, 7, 3, 0, 0, 531, 66, 1, 0, 0, 0, 532, 533, 7, 4, 0, 0, 533, 68, 1, 0, 0, 0, 534, 535, 5, 92, 0, 0, 535, 536, 7, 5, 0, 0, 536, 70, 1, 0, 0, 0, 537, 538, 8, 6, 0, 0, 538, 72, 1, 0, 0, 0, 539, 541, 7, 7, 0, 0, 540, 542, 7, 8, 0, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 544, 1, 0, 0, 0, 543, 545, 3, 65, 27, 0, 544, 543, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 544, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 74, 1, 0, 0, 0, 548, 549, 5, 64, 0, 0, 549, 76, 1, 0, 0, 0, 550, 551, 5, 96, 0, 0, 551, 78, 1, 0, 0, 0, 552, 556, 8, 9, 0, 0, 553, 554, 5, 96, 0, 0, 554, 556, 5, 96, 0, 0, 555, 552, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 80, 1, 0, 0, 0, 557, 558, 5, 95, 0, 0, 558, 82, 1, 0, 0, 0, 559, 563, 3, 67, 28, 0, 560, 563, 3, 65, 27, 0, 561, 563, 3, 81, 35, 0, 562, 559, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 562, 561, 1, 0, 0, 0, 563, 84, 1, 0, 0, 0, 564, 569, 5, 34, 0, 0, 565, 568, 3, 69, 29, 0, 566, 568, 3, 71, 30, 0, 567, 565, 1, 0, 0, 0, 567, 566, 1, 0, 0, 0, 568, 571, 1, 0, 0, 0, 569, 567, 1, 0, 
0, 0, 569, 570, 1, 0, 0, 0, 570, 572, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 594, 5, 34, 0, 0, 573, 574, 5, 34, 0, 0, 574, 575, 5, 34, 0, 0, 575, 576, 5, 34, 0, 0, 576, 580, 1, 0, 0, 0, 577, 579, 8, 1, 0, 0, 578, 577, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 581, 583, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 584, 5, 34, 0, 0, 584, 585, 5, 34, 0, 0, 585, 586, 5, 34, 0, 0, 586, 588, 1, 0, 0, 0, 587, 589, 5, 34, 0, 0, 588, 587, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 591, 1, 0, 0, 0, 590, 592, 5, 34, 0, 0, 591, 590, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 564, 1, 0, 0, 0, 593, 573, 1, 0, 0, 0, 594, 86, 1, 0, 0, 0, 595, 597, 3, 65, 27, 0, 596, 595, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 88, 1, 0, 0, 0, 600, 602, 3, 65, 27, 0, 601, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 601, 1, 0, 0, 0, 603, 604, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 609, 3, 103, 46, 0, 606, 608, 3, 65, 27, 0, 607, 606, 1, 0, 0, 0, 608, 611, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 643, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 612, 614, 3, 103, 46, 0, 613, 615, 3, 65, 27, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 643, 1, 0, 0, 0, 618, 620, 3, 65, 27, 0, 619, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 630, 1, 0, 0, 0, 623, 627, 3, 103, 46, 0, 624, 626, 3, 65, 27, 0, 625, 624, 1, 0, 0, 0, 626, 629, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 623, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 3, 73, 31, 0, 633, 643, 1, 0, 0, 0, 634, 636, 3, 103, 46, 0, 635, 637, 3, 65, 27, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 3, 73, 31, 0, 641, 643, 1, 0, 0, 0, 642, 601, 1, 0, 0, 0, 642, 612, 1, 0, 0, 0, 642, 619, 1, 0, 0, 0, 642, 634, 1, 0, 0, 
0, 643, 90, 1, 0, 0, 0, 644, 645, 5, 98, 0, 0, 645, 646, 5, 121, 0, 0, 646, 92, 1, 0, 0, 0, 647, 648, 5, 97, 0, 0, 648, 649, 5, 110, 0, 0, 649, 650, 5, 100, 0, 0, 650, 94, 1, 0, 0, 0, 651, 652, 5, 97, 0, 0, 652, 653, 5, 115, 0, 0, 653, 654, 5, 99, 0, 0, 654, 96, 1, 0, 0, 0, 655, 656, 5, 61, 0, 0, 656, 98, 1, 0, 0, 0, 657, 658, 5, 44, 0, 0, 658, 100, 1, 0, 0, 0, 659, 660, 5, 100, 0, 0, 660, 661, 5, 101, 0, 0, 661, 662, 5, 115, 0, 0, 662, 663, 5, 99, 0, 0, 663, 102, 1, 0, 0, 0, 664, 665, 5, 46, 0, 0, 665, 104, 1, 0, 0, 0, 666, 667, 5, 102, 0, 0, 667, 668, 5, 97, 0, 0, 668, 669, 5, 108, 0, 0, 669, 670, 5, 115, 0, 0, 670, 671, 5, 101, 0, 0, 671, 106, 1, 0, 0, 0, 672, 673, 5, 102, 0, 0, 673, 674, 5, 105, 0, 0, 674, 675, 5, 114, 0, 0, 675, 676, 5, 115, 0, 0, 676, 677, 5, 116, 0, 0, 677, 108, 1, 0, 0, 0, 678, 679, 5, 108, 0, 0, 679, 680, 5, 97, 0, 0, 680, 681, 5, 115, 0, 0, 681, 682, 5, 116, 0, 0, 682, 110, 1, 0, 0, 0, 683, 684, 5, 40, 0, 0, 684, 112, 1, 0, 0, 0, 685, 686, 5, 105, 0, 0, 686, 687, 5, 110, 0, 0, 687, 114, 1, 0, 0, 0, 688, 689, 5, 105, 0, 0, 689, 690, 5, 115, 0, 0, 690, 116, 1, 0, 0, 0, 691, 692, 5, 108, 0, 0, 692, 693, 5, 105, 0, 0, 693, 694, 5, 107, 0, 0, 694, 695, 5, 101, 0, 0, 695, 118, 1, 0, 0, 0, 696, 697, 5, 110, 0, 0, 697, 698, 5, 111, 0, 0, 698, 699, 5, 116, 0, 0, 699, 120, 1, 0, 0, 0, 700, 701, 5, 110, 0, 0, 701, 702, 5, 117, 0, 0, 702, 703, 5, 108, 0, 0, 703, 704, 5, 108, 0, 0, 704, 122, 1, 0, 0, 0, 705, 706, 5, 110, 0, 0, 706, 707, 5, 117, 0, 0, 707, 708, 5, 108, 0, 0, 708, 709, 5, 108, 0, 0, 709, 710, 5, 115, 0, 0, 710, 124, 1, 0, 0, 0, 711, 712, 5, 111, 0, 0, 712, 713, 5, 114, 0, 0, 713, 126, 1, 0, 0, 0, 714, 715, 5, 63, 0, 0, 715, 128, 1, 0, 0, 0, 716, 717, 5, 114, 0, 0, 717, 718, 5, 108, 0, 0, 718, 719, 5, 105, 0, 0, 719, 720, 5, 107, 0, 0, 720, 721, 5, 101, 0, 0, 721, 130, 1, 0, 0, 0, 722, 723, 5, 41, 0, 0, 723, 132, 1, 0, 0, 0, 724, 725, 5, 116, 0, 0, 725, 726, 5, 114, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 101, 0, 0, 728, 134, 1, 0, 0, 
0, 729, 730, 5, 61, 0, 0, 730, 731, 5, 61, 0, 0, 731, 136, 1, 0, 0, 0, 732, 733, 5, 61, 0, 0, 733, 734, 5, 126, 0, 0, 734, 138, 1, 0, 0, 0, 735, 736, 5, 33, 0, 0, 736, 737, 5, 61, 0, 0, 737, 140, 1, 0, 0, 0, 738, 739, 5, 60, 0, 0, 739, 142, 1, 0, 0, 0, 740, 741, 5, 60, 0, 0, 741, 742, 5, 61, 0, 0, 742, 144, 1, 0, 0, 0, 743, 744, 5, 62, 0, 0, 744, 146, 1, 0, 0, 0, 745, 746, 5, 62, 0, 0, 746, 747, 5, 61, 0, 0, 747, 148, 1, 0, 0, 0, 748, 749, 5, 43, 0, 0, 749, 150, 1, 0, 0, 0, 750, 751, 5, 45, 0, 0, 751, 152, 1, 0, 0, 0, 752, 753, 5, 42, 0, 0, 753, 154, 1, 0, 0, 0, 754, 755, 5, 47, 0, 0, 755, 156, 1, 0, 0, 0, 756, 757, 5, 37, 0, 0, 757, 158, 1, 0, 0, 0, 758, 759, 5, 91, 0, 0, 759, 760, 1, 0, 0, 0, 760, 761, 6, 74, 0, 0, 761, 762, 6, 74, 0, 0, 762, 160, 1, 0, 0, 0, 763, 764, 5, 93, 0, 0, 764, 765, 1, 0, 0, 0, 765, 766, 6, 75, 12, 0, 766, 767, 6, 75, 12, 0, 767, 162, 1, 0, 0, 0, 768, 772, 3, 67, 28, 0, 769, 771, 3, 83, 36, 0, 770, 769, 1, 0, 0, 0, 771, 774, 1, 0, 0, 0, 772, 770, 1, 0, 0, 0, 772, 773, 1, 0, 0, 0, 773, 785, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 775, 778, 3, 81, 35, 0, 776, 778, 3, 75, 32, 0, 777, 775, 1, 0, 0, 0, 777, 776, 1, 0, 0, 0, 778, 780, 1, 0, 0, 0, 779, 781, 3, 83, 36, 0, 780, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 780, 1, 0, 0, 0, 782, 783, 1, 0, 0, 0, 783, 785, 1, 0, 0, 0, 784, 768, 1, 0, 0, 0, 784, 777, 1, 0, 0, 0, 785, 164, 1, 0, 0, 0, 786, 788, 3, 77, 33, 0, 787, 789, 3, 79, 34, 0, 788, 787, 1, 0, 0, 0, 789, 790, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 3, 77, 33, 0, 793, 166, 1, 0, 0, 0, 794, 795, 3, 165, 77, 0, 795, 168, 1, 0, 0, 0, 796, 797, 3, 47, 18, 0, 797, 798, 1, 0, 0, 0, 798, 799, 6, 79, 8, 0, 799, 170, 1, 0, 0, 0, 800, 801, 3, 49, 19, 0, 801, 802, 1, 0, 0, 0, 802, 803, 6, 80, 8, 0, 803, 172, 1, 0, 0, 0, 804, 805, 3, 51, 20, 0, 805, 806, 1, 0, 0, 0, 806, 807, 6, 81, 8, 0, 807, 174, 1, 0, 0, 0, 808, 809, 3, 63, 26, 0, 809, 810, 1, 0, 0, 0, 810, 811, 6, 82, 11, 0, 811, 812, 6, 82, 12, 0, 
812, 176, 1, 0, 0, 0, 813, 814, 3, 159, 74, 0, 814, 815, 1, 0, 0, 0, 815, 816, 6, 83, 9, 0, 816, 178, 1, 0, 0, 0, 817, 818, 3, 161, 75, 0, 818, 819, 1, 0, 0, 0, 819, 820, 6, 84, 13, 0, 820, 180, 1, 0, 0, 0, 821, 822, 3, 99, 44, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 85, 14, 0, 824, 182, 1, 0, 0, 0, 825, 826, 3, 97, 43, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 86, 15, 0, 828, 184, 1, 0, 0, 0, 829, 830, 5, 109, 0, 0, 830, 831, 5, 101, 0, 0, 831, 832, 5, 116, 0, 0, 832, 833, 5, 97, 0, 0, 833, 834, 5, 100, 0, 0, 834, 835, 5, 97, 0, 0, 835, 836, 5, 116, 0, 0, 836, 837, 5, 97, 0, 0, 837, 186, 1, 0, 0, 0, 838, 842, 8, 10, 0, 0, 839, 840, 5, 47, 0, 0, 840, 842, 8, 11, 0, 0, 841, 838, 1, 0, 0, 0, 841, 839, 1, 0, 0, 0, 842, 188, 1, 0, 0, 0, 843, 845, 3, 187, 88, 0, 844, 843, 1, 0, 0, 0, 845, 846, 1, 0, 0, 0, 846, 844, 1, 0, 0, 0, 846, 847, 1, 0, 0, 0, 847, 190, 1, 0, 0, 0, 848, 849, 3, 167, 78, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 90, 16, 0, 851, 192, 1, 0, 0, 0, 852, 853, 3, 47, 18, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 91, 8, 0, 855, 194, 1, 0, 0, 0, 856, 857, 3, 49, 19, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 92, 8, 0, 859, 196, 1, 0, 0, 0, 860, 861, 3, 51, 20, 0, 861, 862, 1, 0, 0, 0, 862, 863, 6, 93, 8, 0, 863, 198, 1, 0, 0, 0, 864, 865, 3, 63, 26, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 94, 11, 0, 867, 868, 6, 94, 12, 0, 868, 200, 1, 0, 0, 0, 869, 870, 3, 103, 46, 0, 870, 871, 1, 0, 0, 0, 871, 872, 6, 95, 17, 0, 872, 202, 1, 0, 0, 0, 873, 874, 3, 99, 44, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 96, 14, 0, 876, 204, 1, 0, 0, 0, 877, 882, 3, 67, 28, 0, 878, 882, 3, 65, 27, 0, 879, 882, 3, 81, 35, 0, 880, 882, 3, 153, 71, 0, 881, 877, 1, 0, 0, 0, 881, 878, 1, 0, 0, 0, 881, 879, 1, 0, 0, 0, 881, 880, 1, 0, 0, 0, 882, 206, 1, 0, 0, 0, 883, 886, 3, 67, 28, 0, 884, 886, 3, 153, 71, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 890, 1, 0, 0, 0, 887, 889, 3, 205, 97, 0, 888, 887, 1, 0, 0, 0, 889, 892, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 903, 1, 0, 0, 0, 
892, 890, 1, 0, 0, 0, 893, 896, 3, 81, 35, 0, 894, 896, 3, 75, 32, 0, 895, 893, 1, 0, 0, 0, 895, 894, 1, 0, 0, 0, 896, 898, 1, 0, 0, 0, 897, 899, 3, 205, 97, 0, 898, 897, 1, 0, 0, 0, 899, 900, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 903, 1, 0, 0, 0, 902, 885, 1, 0, 0, 0, 902, 895, 1, 0, 0, 0, 903, 208, 1, 0, 0, 0, 904, 907, 3, 207, 98, 0, 905, 907, 3, 165, 77, 0, 906, 904, 1, 0, 0, 0, 906, 905, 1, 0, 0, 0, 907, 908, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 210, 1, 0, 0, 0, 910, 911, 3, 47, 18, 0, 911, 912, 1, 0, 0, 0, 912, 913, 6, 100, 8, 0, 913, 212, 1, 0, 0, 0, 914, 915, 3, 49, 19, 0, 915, 916, 1, 0, 0, 0, 916, 917, 6, 101, 8, 0, 917, 214, 1, 0, 0, 0, 918, 919, 3, 51, 20, 0, 919, 920, 1, 0, 0, 0, 920, 921, 6, 102, 8, 0, 921, 216, 1, 0, 0, 0, 922, 923, 3, 63, 26, 0, 923, 924, 1, 0, 0, 0, 924, 925, 6, 103, 11, 0, 925, 926, 6, 103, 12, 0, 926, 218, 1, 0, 0, 0, 927, 928, 3, 97, 43, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 104, 15, 0, 930, 220, 1, 0, 0, 0, 931, 932, 3, 99, 44, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 105, 14, 0, 934, 222, 1, 0, 0, 0, 935, 936, 3, 103, 46, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 106, 17, 0, 938, 224, 1, 0, 0, 0, 939, 940, 5, 97, 0, 0, 940, 941, 5, 115, 0, 0, 941, 226, 1, 0, 0, 0, 942, 943, 3, 209, 99, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 108, 18, 0, 945, 228, 1, 0, 0, 0, 946, 947, 3, 47, 18, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 109, 8, 0, 949, 230, 1, 0, 0, 0, 950, 951, 3, 49, 19, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 110, 8, 0, 953, 232, 1, 0, 0, 0, 954, 955, 3, 51, 20, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 111, 8, 0, 957, 234, 1, 0, 0, 0, 958, 959, 3, 63, 26, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 112, 11, 0, 961, 962, 6, 112, 12, 0, 962, 236, 1, 0, 0, 0, 963, 964, 3, 159, 74, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 113, 9, 0, 966, 967, 6, 113, 19, 0, 967, 238, 1, 0, 0, 0, 968, 969, 5, 111, 0, 0, 969, 970, 5, 110, 0, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 114, 20, 0, 972, 240, 1, 0, 0, 0, 973, 974, 5, 
119, 0, 0, 974, 975, 5, 105, 0, 0, 975, 976, 5, 116, 0, 0, 976, 977, 5, 104, 0, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 115, 20, 0, 979, 242, 1, 0, 0, 0, 980, 981, 8, 12, 0, 0, 981, 244, 1, 0, 0, 0, 982, 984, 3, 243, 116, 0, 983, 982, 1, 0, 0, 0, 984, 985, 1, 0, 0, 0, 985, 983, 1, 0, 0, 0, 985, 986, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 988, 3, 305, 147, 0, 988, 990, 1, 0, 0, 0, 989, 983, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 992, 1, 0, 0, 0, 991, 993, 3, 243, 116, 0, 992, 991, 1, 0, 0, 0, 993, 994, 1, 0, 0, 0, 994, 992, 1, 0, 0, 0, 994, 995, 1, 0, 0, 0, 995, 246, 1, 0, 0, 0, 996, 997, 3, 167, 78, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 118, 16, 0, 999, 248, 1, 0, 0, 0, 1000, 1001, 3, 245, 117, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 119, 21, 0, 1003, 250, 1, 0, 0, 0, 1004, 1005, 3, 47, 18, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 120, 8, 0, 1007, 252, 1, 0, 0, 0, 1008, 1009, 3, 49, 19, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 121, 8, 0, 1011, 254, 1, 0, 0, 0, 1012, 1013, 3, 51, 20, 0, 1013, 1014, 1, 0, 0, 0, 1014, 1015, 6, 122, 8, 0, 1015, 256, 1, 0, 0, 0, 1016, 1017, 3, 63, 26, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1019, 6, 123, 11, 0, 1019, 1020, 6, 123, 12, 0, 1020, 1021, 6, 123, 12, 0, 1021, 258, 1, 0, 0, 0, 1022, 1023, 3, 97, 43, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 124, 15, 0, 1025, 260, 1, 0, 0, 0, 1026, 1027, 3, 99, 44, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 125, 14, 0, 1029, 262, 1, 0, 0, 0, 1030, 1031, 3, 103, 46, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 126, 17, 0, 1033, 264, 1, 0, 0, 0, 1034, 1035, 3, 241, 115, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 127, 22, 0, 1037, 266, 1, 0, 0, 0, 1038, 1039, 3, 209, 99, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 128, 18, 0, 1041, 268, 1, 0, 0, 0, 1042, 1043, 3, 167, 78, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 129, 16, 0, 1045, 270, 1, 0, 0, 0, 1046, 1047, 3, 47, 18, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 130, 8, 0, 1049, 272, 1, 0, 0, 0, 1050, 1051, 3, 49, 19, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 
131, 8, 0, 1053, 274, 1, 0, 0, 0, 1054, 1055, 3, 51, 20, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 132, 8, 0, 1057, 276, 1, 0, 0, 0, 1058, 1059, 3, 63, 26, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 133, 11, 0, 1061, 1062, 6, 133, 12, 0, 1062, 278, 1, 0, 0, 0, 1063, 1064, 3, 103, 46, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 134, 17, 0, 1066, 280, 1, 0, 0, 0, 1067, 1068, 3, 167, 78, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 135, 16, 0, 1070, 282, 1, 0, 0, 0, 1071, 1072, 3, 163, 76, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 136, 23, 0, 1074, 284, 1, 0, 0, 0, 1075, 1076, 3, 47, 18, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 137, 8, 0, 1078, 286, 1, 0, 0, 0, 1079, 1080, 3, 49, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 138, 8, 0, 1082, 288, 1, 0, 0, 0, 1083, 1084, 3, 51, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 139, 8, 0, 1086, 290, 1, 0, 0, 0, 1087, 1088, 3, 63, 26, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 140, 11, 0, 1090, 1091, 6, 140, 12, 0, 1091, 292, 1, 0, 0, 0, 1092, 1093, 5, 105, 0, 0, 1093, 1094, 5, 110, 0, 0, 1094, 1095, 5, 102, 0, 0, 1095, 1096, 5, 111, 0, 0, 1096, 294, 1, 0, 0, 0, 1097, 1098, 5, 102, 0, 0, 1098, 1099, 5, 117, 0, 0, 1099, 1100, 5, 110, 0, 0, 1100, 1101, 5, 99, 0, 0, 1101, 1102, 5, 116, 0, 0, 1102, 1103, 5, 105, 0, 0, 1103, 1104, 5, 111, 0, 0, 1104, 1105, 5, 110, 0, 0, 1105, 1106, 5, 115, 0, 0, 1106, 296, 1, 0, 0, 0, 1107, 1108, 3, 47, 18, 0, 1108, 1109, 1, 0, 0, 0, 1109, 1110, 6, 143, 8, 0, 1110, 298, 1, 0, 0, 0, 1111, 1112, 3, 49, 19, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 144, 8, 0, 1114, 300, 1, 0, 0, 0, 1115, 1116, 3, 51, 20, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 145, 8, 0, 1118, 302, 1, 0, 0, 0, 1119, 1120, 3, 161, 75, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 146, 13, 0, 1122, 1123, 6, 146, 12, 0, 1123, 304, 1, 0, 0, 0, 1124, 1125, 5, 58, 0, 0, 1125, 306, 1, 0, 0, 0, 1126, 1132, 3, 75, 32, 0, 1127, 1132, 3, 65, 27, 0, 1128, 1132, 3, 103, 46, 0, 1129, 1132, 3, 67, 28, 0, 1130, 1132, 3, 81, 35, 0, 1131, 1126, 1, 0, 0, 0, 
1131, 1127, 1, 0, 0, 0, 1131, 1128, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1130, 1, 0, 0, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1134, 1, 0, 0, 0, 1134, 308, 1, 0, 0, 0, 1135, 1136, 3, 47, 18, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 149, 8, 0, 1138, 310, 1, 0, 0, 0, 1139, 1140, 3, 49, 19, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 150, 8, 0, 1142, 312, 1, 0, 0, 0, 1143, 1144, 3, 51, 20, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 151, 8, 0, 1146, 314, 1, 0, 0, 0, 57, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 461, 471, 475, 478, 487, 489, 500, 541, 546, 555, 562, 567, 569, 580, 588, 591, 593, 598, 603, 609, 616, 621, 627, 630, 638, 642, 772, 777, 782, 784, 790, 841, 846, 881, 885, 890, 895, 900, 902, 906, 908, 985, 989, 994, 1131, 1133, 24, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 25, 0, 4, 0, 0, 7, 64, 0, 7, 33, 0, 7, 32, 0, 7, 66, 0, 7, 35, 0, 7, 75, 0, 5, 10, 0, 5, 7, 0, 7, 85, 0, 7, 84, 0, 7, 65, 0] \ No newline at end of file +[4, 0, 108, 1182, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 
64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 
1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 478, 8, 18, 11, 18, 12, 18, 479, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 488, 8, 19, 10, 19, 12, 19, 491, 9, 19, 1, 19, 3, 19, 494, 8, 19, 1, 19, 3, 19, 497, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 506, 8, 20, 10, 20, 12, 20, 509, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 517, 8, 21, 11, 21, 12, 21, 518, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 560, 8, 32, 1, 32, 4, 32, 563, 8, 32, 11, 32, 12, 32, 564, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 574, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 581, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 586, 8, 38, 10, 38, 12, 38, 589, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 597, 8, 38, 10, 38, 12, 38, 600, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 607, 8, 38, 1, 38, 3, 38, 610, 8, 38, 3, 38, 612, 8, 38, 1, 39, 4, 39, 615, 8, 39, 11, 39, 12, 39, 616, 1, 40, 4, 40, 620, 8, 40, 11, 40, 12, 40, 621, 1, 40, 1, 40, 5, 40, 626, 8, 40, 10, 40, 12, 40, 629, 9, 40, 1, 40, 1, 40, 4, 40, 633, 8, 40, 11, 40, 12, 40, 634, 1, 40, 4, 40, 638, 8, 40, 11, 40, 12, 40, 639, 1, 40, 1, 40, 5, 40, 644, 8, 40, 10, 40, 12, 40, 647, 9, 40, 3, 40, 649, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 655, 8, 40, 11, 40, 12, 40, 656, 1, 40, 1, 40, 3, 40, 661, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 
45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 789, 8, 77, 10, 77, 12, 77, 792, 9, 77, 1, 77, 1, 77, 3, 77, 796, 8, 77, 1, 77, 4, 77, 799, 8, 77, 11, 77, 12, 77, 800, 3, 77, 803, 8, 77, 1, 78, 1, 78, 4, 78, 807, 8, 78, 11, 78, 12, 78, 808, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 3, 89, 860, 8, 89, 1, 90, 4, 90, 863, 8, 90, 11, 90, 12, 90, 864, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 3, 98, 900, 8, 98, 1, 99, 1, 99, 3, 99, 904, 8, 99, 1, 99, 5, 99, 907, 8, 99, 10, 99, 12, 99, 910, 9, 99, 1, 99, 1, 99, 3, 99, 914, 8, 99, 1, 99, 4, 99, 917, 8, 99, 11, 99, 12, 99, 918, 3, 99, 921, 8, 99, 1, 100, 1, 100, 4, 100, 925, 8, 100, 11, 100, 12, 100, 926, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 
1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 4, 118, 1002, 8, 118, 11, 118, 12, 118, 1003, 1, 118, 1, 118, 3, 118, 1008, 8, 118, 1, 118, 4, 118, 1011, 8, 118, 11, 118, 12, 118, 1012, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 4, 153, 1167, 8, 153, 11, 153, 12, 153, 1168, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 2, 507, 598, 0, 157, 12, 1, 14, 
2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 0, 170, 67, 172, 68, 174, 69, 176, 70, 178, 0, 180, 0, 182, 0, 184, 0, 186, 0, 188, 71, 190, 0, 192, 72, 194, 0, 196, 73, 198, 74, 200, 75, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 76, 214, 77, 216, 78, 218, 79, 220, 0, 222, 0, 224, 0, 226, 0, 228, 80, 230, 0, 232, 81, 234, 82, 236, 83, 238, 0, 240, 0, 242, 84, 244, 85, 246, 0, 248, 86, 250, 0, 252, 0, 254, 87, 256, 88, 258, 89, 260, 0, 262, 0, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 90, 276, 91, 278, 92, 280, 0, 282, 0, 284, 0, 286, 0, 288, 93, 290, 94, 292, 95, 294, 0, 296, 96, 298, 97, 300, 98, 302, 99, 304, 0, 306, 100, 308, 101, 310, 102, 312, 103, 314, 0, 316, 104, 318, 105, 320, 106, 322, 107, 324, 108, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1209, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 
1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 4, 202, 1, 0, 0, 0, 4, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 5, 220, 1, 0, 0, 0, 5, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 
0, 0, 7, 260, 1, 0, 0, 0, 7, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 8, 280, 1, 0, 0, 0, 8, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 9, 294, 1, 0, 0, 0, 9, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 10, 304, 1, 0, 0, 0, 10, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 11, 314, 1, 0, 0, 0, 11, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 12, 326, 1, 0, 0, 0, 14, 336, 1, 0, 0, 0, 16, 343, 1, 0, 0, 0, 18, 352, 1, 0, 0, 0, 20, 359, 1, 0, 0, 0, 22, 369, 1, 0, 0, 0, 24, 376, 1, 0, 0, 0, 26, 383, 1, 0, 0, 0, 28, 397, 1, 0, 0, 0, 30, 404, 1, 0, 0, 0, 32, 412, 1, 0, 0, 0, 34, 419, 1, 0, 0, 0, 36, 431, 1, 0, 0, 0, 38, 440, 1, 0, 0, 0, 40, 446, 1, 0, 0, 0, 42, 453, 1, 0, 0, 0, 44, 460, 1, 0, 0, 0, 46, 468, 1, 0, 0, 0, 48, 477, 1, 0, 0, 0, 50, 483, 1, 0, 0, 0, 52, 500, 1, 0, 0, 0, 54, 516, 1, 0, 0, 0, 56, 522, 1, 0, 0, 0, 58, 527, 1, 0, 0, 0, 60, 532, 1, 0, 0, 0, 62, 536, 1, 0, 0, 0, 64, 540, 1, 0, 0, 0, 66, 544, 1, 0, 0, 0, 68, 548, 1, 0, 0, 0, 70, 550, 1, 0, 0, 0, 72, 552, 1, 0, 0, 0, 74, 555, 1, 0, 0, 0, 76, 557, 1, 0, 0, 0, 78, 566, 1, 0, 0, 0, 80, 568, 1, 0, 0, 0, 82, 573, 1, 0, 0, 0, 84, 575, 1, 0, 0, 0, 86, 580, 1, 0, 0, 0, 88, 611, 1, 0, 0, 0, 90, 614, 1, 0, 0, 0, 92, 660, 1, 0, 0, 0, 94, 662, 1, 0, 0, 0, 96, 665, 1, 0, 0, 0, 98, 669, 1, 0, 0, 0, 100, 673, 1, 0, 0, 0, 102, 675, 1, 0, 0, 0, 104, 677, 1, 0, 0, 0, 106, 682, 1, 0, 0, 0, 108, 684, 1, 0, 0, 0, 110, 690, 1, 0, 0, 0, 112, 696, 1, 0, 0, 0, 114, 701, 1, 0, 0, 0, 116, 703, 1, 0, 0, 0, 118, 706, 1, 0, 0, 0, 120, 709, 1, 0, 0, 0, 122, 714, 1, 0, 0, 0, 124, 718, 1, 0, 0, 0, 126, 723, 1, 0, 0, 0, 128, 729, 1, 0, 0, 0, 130, 732, 1, 0, 0, 0, 132, 734, 1, 0, 0, 0, 134, 740, 1, 0, 0, 0, 
136, 742, 1, 0, 0, 0, 138, 747, 1, 0, 0, 0, 140, 750, 1, 0, 0, 0, 142, 753, 1, 0, 0, 0, 144, 756, 1, 0, 0, 0, 146, 758, 1, 0, 0, 0, 148, 761, 1, 0, 0, 0, 150, 763, 1, 0, 0, 0, 152, 766, 1, 0, 0, 0, 154, 768, 1, 0, 0, 0, 156, 770, 1, 0, 0, 0, 158, 772, 1, 0, 0, 0, 160, 774, 1, 0, 0, 0, 162, 776, 1, 0, 0, 0, 164, 781, 1, 0, 0, 0, 166, 802, 1, 0, 0, 0, 168, 804, 1, 0, 0, 0, 170, 812, 1, 0, 0, 0, 172, 814, 1, 0, 0, 0, 174, 818, 1, 0, 0, 0, 176, 822, 1, 0, 0, 0, 178, 826, 1, 0, 0, 0, 180, 831, 1, 0, 0, 0, 182, 835, 1, 0, 0, 0, 184, 839, 1, 0, 0, 0, 186, 843, 1, 0, 0, 0, 188, 847, 1, 0, 0, 0, 190, 859, 1, 0, 0, 0, 192, 862, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 870, 1, 0, 0, 0, 198, 874, 1, 0, 0, 0, 200, 878, 1, 0, 0, 0, 202, 882, 1, 0, 0, 0, 204, 887, 1, 0, 0, 0, 206, 891, 1, 0, 0, 0, 208, 899, 1, 0, 0, 0, 210, 920, 1, 0, 0, 0, 212, 924, 1, 0, 0, 0, 214, 928, 1, 0, 0, 0, 216, 932, 1, 0, 0, 0, 218, 936, 1, 0, 0, 0, 220, 940, 1, 0, 0, 0, 222, 945, 1, 0, 0, 0, 224, 949, 1, 0, 0, 0, 226, 953, 1, 0, 0, 0, 228, 957, 1, 0, 0, 0, 230, 960, 1, 0, 0, 0, 232, 964, 1, 0, 0, 0, 234, 968, 1, 0, 0, 0, 236, 972, 1, 0, 0, 0, 238, 976, 1, 0, 0, 0, 240, 981, 1, 0, 0, 0, 242, 986, 1, 0, 0, 0, 244, 991, 1, 0, 0, 0, 246, 998, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1014, 1, 0, 0, 0, 252, 1018, 1, 0, 0, 0, 254, 1022, 1, 0, 0, 0, 256, 1026, 1, 0, 0, 0, 258, 1030, 1, 0, 0, 0, 260, 1034, 1, 0, 0, 0, 262, 1040, 1, 0, 0, 0, 264, 1044, 1, 0, 0, 0, 266, 1048, 1, 0, 0, 0, 268, 1052, 1, 0, 0, 0, 270, 1056, 1, 0, 0, 0, 272, 1060, 1, 0, 0, 0, 274, 1064, 1, 0, 0, 0, 276, 1068, 1, 0, 0, 0, 278, 1072, 1, 0, 0, 0, 280, 1076, 1, 0, 0, 0, 282, 1081, 1, 0, 0, 0, 284, 1085, 1, 0, 0, 0, 286, 1089, 1, 0, 0, 0, 288, 1093, 1, 0, 0, 0, 290, 1097, 1, 0, 0, 0, 292, 1101, 1, 0, 0, 0, 294, 1105, 1, 0, 0, 0, 296, 1110, 1, 0, 0, 0, 298, 1115, 1, 0, 0, 0, 300, 1119, 1, 0, 0, 0, 302, 1123, 1, 0, 0, 0, 304, 1127, 1, 0, 0, 0, 306, 1132, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1146, 1, 0, 0, 0, 312, 1150, 1, 0, 0, 0, 314, 
1154, 1, 0, 0, 0, 316, 1159, 1, 0, 0, 0, 318, 1166, 1, 0, 0, 0, 320, 1170, 1, 0, 0, 0, 322, 1174, 1, 0, 0, 0, 324, 1178, 1, 0, 0, 0, 326, 327, 5, 100, 0, 0, 327, 328, 5, 105, 0, 0, 328, 329, 5, 115, 0, 0, 329, 330, 5, 115, 0, 0, 330, 331, 5, 101, 0, 0, 331, 332, 5, 99, 0, 0, 332, 333, 5, 116, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 6, 0, 0, 0, 335, 13, 1, 0, 0, 0, 336, 337, 5, 100, 0, 0, 337, 338, 5, 114, 0, 0, 338, 339, 5, 111, 0, 0, 339, 340, 5, 112, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 1, 1, 0, 342, 15, 1, 0, 0, 0, 343, 344, 5, 101, 0, 0, 344, 345, 5, 110, 0, 0, 345, 346, 5, 114, 0, 0, 346, 347, 5, 105, 0, 0, 347, 348, 5, 99, 0, 0, 348, 349, 5, 104, 0, 0, 349, 350, 1, 0, 0, 0, 350, 351, 6, 2, 2, 0, 351, 17, 1, 0, 0, 0, 352, 353, 5, 101, 0, 0, 353, 354, 5, 118, 0, 0, 354, 355, 5, 97, 0, 0, 355, 356, 5, 108, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 6, 3, 0, 0, 358, 19, 1, 0, 0, 0, 359, 360, 5, 101, 0, 0, 360, 361, 5, 120, 0, 0, 361, 362, 5, 112, 0, 0, 362, 363, 5, 108, 0, 0, 363, 364, 5, 97, 0, 0, 364, 365, 5, 105, 0, 0, 365, 366, 5, 110, 0, 0, 366, 367, 1, 0, 0, 0, 367, 368, 6, 4, 3, 0, 368, 21, 1, 0, 0, 0, 369, 370, 5, 102, 0, 0, 370, 371, 5, 114, 0, 0, 371, 372, 5, 111, 0, 0, 372, 373, 5, 109, 0, 0, 373, 374, 1, 0, 0, 0, 374, 375, 6, 5, 4, 0, 375, 23, 1, 0, 0, 0, 376, 377, 5, 103, 0, 0, 377, 378, 5, 114, 0, 0, 378, 379, 5, 111, 0, 0, 379, 380, 5, 107, 0, 0, 380, 381, 1, 0, 0, 0, 381, 382, 6, 6, 0, 0, 382, 25, 1, 0, 0, 0, 383, 384, 5, 105, 0, 0, 384, 385, 5, 110, 0, 0, 385, 386, 5, 108, 0, 0, 386, 387, 5, 105, 0, 0, 387, 388, 5, 110, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 115, 0, 0, 390, 391, 5, 116, 0, 0, 391, 392, 5, 97, 0, 0, 392, 393, 5, 116, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 7, 0, 0, 396, 27, 1, 0, 0, 0, 397, 398, 5, 107, 0, 0, 398, 399, 5, 101, 0, 0, 399, 400, 5, 101, 0, 0, 400, 401, 5, 112, 0, 0, 401, 402, 1, 0, 0, 0, 402, 403, 6, 8, 1, 0, 403, 29, 1, 0, 0, 0, 404, 405, 5, 108, 0, 0, 405, 406, 5, 105, 0, 0, 406, 407, 5, 
109, 0, 0, 407, 408, 5, 105, 0, 0, 408, 409, 5, 116, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 9, 0, 0, 411, 31, 1, 0, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 101, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 5, 97, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 10, 5, 0, 418, 33, 1, 0, 0, 0, 419, 420, 5, 109, 0, 0, 420, 421, 5, 118, 0, 0, 421, 422, 5, 95, 0, 0, 422, 423, 5, 101, 0, 0, 423, 424, 5, 120, 0, 0, 424, 425, 5, 112, 0, 0, 425, 426, 5, 97, 0, 0, 426, 427, 5, 110, 0, 0, 427, 428, 5, 100, 0, 0, 428, 429, 1, 0, 0, 0, 429, 430, 6, 11, 6, 0, 430, 35, 1, 0, 0, 0, 431, 432, 5, 114, 0, 0, 432, 433, 5, 101, 0, 0, 433, 434, 5, 110, 0, 0, 434, 435, 5, 97, 0, 0, 435, 436, 5, 109, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 12, 7, 0, 439, 37, 1, 0, 0, 0, 440, 441, 5, 114, 0, 0, 441, 442, 5, 111, 0, 0, 442, 443, 5, 119, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 13, 0, 0, 445, 39, 1, 0, 0, 0, 446, 447, 5, 115, 0, 0, 447, 448, 5, 104, 0, 0, 448, 449, 5, 111, 0, 0, 449, 450, 5, 119, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 6, 14, 8, 0, 452, 41, 1, 0, 0, 0, 453, 454, 5, 115, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 116, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 6, 15, 0, 0, 459, 43, 1, 0, 0, 0, 460, 461, 5, 115, 0, 0, 461, 462, 5, 116, 0, 0, 462, 463, 5, 97, 0, 0, 463, 464, 5, 116, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 16, 0, 0, 467, 45, 1, 0, 0, 0, 468, 469, 5, 119, 0, 0, 469, 470, 5, 104, 0, 0, 470, 471, 5, 101, 0, 0, 471, 472, 5, 114, 0, 0, 472, 473, 5, 101, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 6, 17, 0, 0, 475, 47, 1, 0, 0, 0, 476, 478, 8, 0, 0, 0, 477, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 18, 0, 0, 482, 49, 1, 0, 0, 0, 483, 484, 5, 47, 0, 0, 484, 485, 5, 47, 0, 0, 485, 489, 1, 0, 0, 0, 486, 488, 8, 1, 0, 0, 487, 486, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 
0, 0, 492, 494, 5, 13, 0, 0, 493, 492, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 497, 5, 10, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 6, 19, 9, 0, 499, 51, 1, 0, 0, 0, 500, 501, 5, 47, 0, 0, 501, 502, 5, 42, 0, 0, 502, 507, 1, 0, 0, 0, 503, 506, 3, 52, 20, 0, 504, 506, 9, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 504, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 510, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 5, 42, 0, 0, 511, 512, 5, 47, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 20, 9, 0, 514, 53, 1, 0, 0, 0, 515, 517, 7, 2, 0, 0, 516, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 21, 9, 0, 521, 55, 1, 0, 0, 0, 522, 523, 3, 162, 75, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 22, 10, 0, 525, 526, 6, 22, 11, 0, 526, 57, 1, 0, 0, 0, 527, 528, 3, 66, 27, 0, 528, 529, 1, 0, 0, 0, 529, 530, 6, 23, 12, 0, 530, 531, 6, 23, 13, 0, 531, 59, 1, 0, 0, 0, 532, 533, 3, 54, 21, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 24, 9, 0, 535, 61, 1, 0, 0, 0, 536, 537, 3, 50, 19, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 25, 9, 0, 539, 63, 1, 0, 0, 0, 540, 541, 3, 52, 20, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 26, 9, 0, 543, 65, 1, 0, 0, 0, 544, 545, 5, 124, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 27, 13, 0, 547, 67, 1, 0, 0, 0, 548, 549, 7, 3, 0, 0, 549, 69, 1, 0, 0, 0, 550, 551, 7, 4, 0, 0, 551, 71, 1, 0, 0, 0, 552, 553, 5, 92, 0, 0, 553, 554, 7, 5, 0, 0, 554, 73, 1, 0, 0, 0, 555, 556, 8, 6, 0, 0, 556, 75, 1, 0, 0, 0, 557, 559, 7, 7, 0, 0, 558, 560, 7, 8, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 562, 1, 0, 0, 0, 561, 563, 3, 68, 28, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 77, 1, 0, 0, 0, 566, 567, 5, 64, 0, 0, 567, 79, 1, 0, 0, 0, 568, 569, 5, 96, 0, 0, 569, 81, 1, 0, 0, 0, 570, 574, 8, 9, 0, 0, 571, 572, 5, 96, 0, 0, 572, 574, 5, 96, 0, 0, 573, 570, 1, 0, 0, 0, 573, 
571, 1, 0, 0, 0, 574, 83, 1, 0, 0, 0, 575, 576, 5, 95, 0, 0, 576, 85, 1, 0, 0, 0, 577, 581, 3, 70, 29, 0, 578, 581, 3, 68, 28, 0, 579, 581, 3, 84, 36, 0, 580, 577, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 579, 1, 0, 0, 0, 581, 87, 1, 0, 0, 0, 582, 587, 5, 34, 0, 0, 583, 586, 3, 72, 30, 0, 584, 586, 3, 74, 31, 0, 585, 583, 1, 0, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 590, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 612, 5, 34, 0, 0, 591, 592, 5, 34, 0, 0, 592, 593, 5, 34, 0, 0, 593, 594, 5, 34, 0, 0, 594, 598, 1, 0, 0, 0, 595, 597, 8, 1, 0, 0, 596, 595, 1, 0, 0, 0, 597, 600, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 599, 601, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 601, 602, 5, 34, 0, 0, 602, 603, 5, 34, 0, 0, 603, 604, 5, 34, 0, 0, 604, 606, 1, 0, 0, 0, 605, 607, 5, 34, 0, 0, 606, 605, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 610, 5, 34, 0, 0, 609, 608, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 582, 1, 0, 0, 0, 611, 591, 1, 0, 0, 0, 612, 89, 1, 0, 0, 0, 613, 615, 3, 68, 28, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 91, 1, 0, 0, 0, 618, 620, 3, 68, 28, 0, 619, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 627, 3, 106, 47, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 629, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 661, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 632, 3, 106, 47, 0, 631, 633, 3, 68, 28, 0, 632, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 632, 1, 0, 0, 0, 634, 635, 1, 0, 0, 0, 635, 661, 1, 0, 0, 0, 636, 638, 3, 68, 28, 0, 637, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 648, 1, 0, 0, 0, 641, 645, 3, 106, 47, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 649, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 641, 
1, 0, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 3, 76, 32, 0, 651, 661, 1, 0, 0, 0, 652, 654, 3, 106, 47, 0, 653, 655, 3, 68, 28, 0, 654, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 1, 0, 0, 0, 658, 659, 3, 76, 32, 0, 659, 661, 1, 0, 0, 0, 660, 619, 1, 0, 0, 0, 660, 630, 1, 0, 0, 0, 660, 637, 1, 0, 0, 0, 660, 652, 1, 0, 0, 0, 661, 93, 1, 0, 0, 0, 662, 663, 5, 98, 0, 0, 663, 664, 5, 121, 0, 0, 664, 95, 1, 0, 0, 0, 665, 666, 5, 97, 0, 0, 666, 667, 5, 110, 0, 0, 667, 668, 5, 100, 0, 0, 668, 97, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 115, 0, 0, 671, 672, 5, 99, 0, 0, 672, 99, 1, 0, 0, 0, 673, 674, 5, 61, 0, 0, 674, 101, 1, 0, 0, 0, 675, 676, 5, 44, 0, 0, 676, 103, 1, 0, 0, 0, 677, 678, 5, 100, 0, 0, 678, 679, 5, 101, 0, 0, 679, 680, 5, 115, 0, 0, 680, 681, 5, 99, 0, 0, 681, 105, 1, 0, 0, 0, 682, 683, 5, 46, 0, 0, 683, 107, 1, 0, 0, 0, 684, 685, 5, 102, 0, 0, 685, 686, 5, 97, 0, 0, 686, 687, 5, 108, 0, 0, 687, 688, 5, 115, 0, 0, 688, 689, 5, 101, 0, 0, 689, 109, 1, 0, 0, 0, 690, 691, 5, 102, 0, 0, 691, 692, 5, 105, 0, 0, 692, 693, 5, 114, 0, 0, 693, 694, 5, 115, 0, 0, 694, 695, 5, 116, 0, 0, 695, 111, 1, 0, 0, 0, 696, 697, 5, 108, 0, 0, 697, 698, 5, 97, 0, 0, 698, 699, 5, 115, 0, 0, 699, 700, 5, 116, 0, 0, 700, 113, 1, 0, 0, 0, 701, 702, 5, 40, 0, 0, 702, 115, 1, 0, 0, 0, 703, 704, 5, 105, 0, 0, 704, 705, 5, 110, 0, 0, 705, 117, 1, 0, 0, 0, 706, 707, 5, 105, 0, 0, 707, 708, 5, 115, 0, 0, 708, 119, 1, 0, 0, 0, 709, 710, 5, 108, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 107, 0, 0, 712, 713, 5, 101, 0, 0, 713, 121, 1, 0, 0, 0, 714, 715, 5, 110, 0, 0, 715, 716, 5, 111, 0, 0, 716, 717, 5, 116, 0, 0, 717, 123, 1, 0, 0, 0, 718, 719, 5, 110, 0, 0, 719, 720, 5, 117, 0, 0, 720, 721, 5, 108, 0, 0, 721, 722, 5, 108, 0, 0, 722, 125, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 117, 0, 0, 725, 726, 5, 108, 0, 0, 726, 727, 5, 108, 0, 0, 727, 728, 5, 115, 0, 0, 728, 127, 1, 0, 0, 0, 729, 730, 5, 111, 0, 0, 
730, 731, 5, 114, 0, 0, 731, 129, 1, 0, 0, 0, 732, 733, 5, 63, 0, 0, 733, 131, 1, 0, 0, 0, 734, 735, 5, 114, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 105, 0, 0, 737, 738, 5, 107, 0, 0, 738, 739, 5, 101, 0, 0, 739, 133, 1, 0, 0, 0, 740, 741, 5, 41, 0, 0, 741, 135, 1, 0, 0, 0, 742, 743, 5, 116, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 117, 0, 0, 745, 746, 5, 101, 0, 0, 746, 137, 1, 0, 0, 0, 747, 748, 5, 61, 0, 0, 748, 749, 5, 61, 0, 0, 749, 139, 1, 0, 0, 0, 750, 751, 5, 61, 0, 0, 751, 752, 5, 126, 0, 0, 752, 141, 1, 0, 0, 0, 753, 754, 5, 33, 0, 0, 754, 755, 5, 61, 0, 0, 755, 143, 1, 0, 0, 0, 756, 757, 5, 60, 0, 0, 757, 145, 1, 0, 0, 0, 758, 759, 5, 60, 0, 0, 759, 760, 5, 61, 0, 0, 760, 147, 1, 0, 0, 0, 761, 762, 5, 62, 0, 0, 762, 149, 1, 0, 0, 0, 763, 764, 5, 62, 0, 0, 764, 765, 5, 61, 0, 0, 765, 151, 1, 0, 0, 0, 766, 767, 5, 43, 0, 0, 767, 153, 1, 0, 0, 0, 768, 769, 5, 45, 0, 0, 769, 155, 1, 0, 0, 0, 770, 771, 5, 42, 0, 0, 771, 157, 1, 0, 0, 0, 772, 773, 5, 47, 0, 0, 773, 159, 1, 0, 0, 0, 774, 775, 5, 37, 0, 0, 775, 161, 1, 0, 0, 0, 776, 777, 5, 91, 0, 0, 777, 778, 1, 0, 0, 0, 778, 779, 6, 75, 0, 0, 779, 780, 6, 75, 0, 0, 780, 163, 1, 0, 0, 0, 781, 782, 5, 93, 0, 0, 782, 783, 1, 0, 0, 0, 783, 784, 6, 76, 13, 0, 784, 785, 6, 76, 13, 0, 785, 165, 1, 0, 0, 0, 786, 790, 3, 70, 29, 0, 787, 789, 3, 86, 37, 0, 788, 787, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 803, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 793, 796, 3, 84, 36, 0, 794, 796, 3, 78, 33, 0, 795, 793, 1, 0, 0, 0, 795, 794, 1, 0, 0, 0, 796, 798, 1, 0, 0, 0, 797, 799, 3, 86, 37, 0, 798, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 798, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 803, 1, 0, 0, 0, 802, 786, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 803, 167, 1, 0, 0, 0, 804, 806, 3, 80, 34, 0, 805, 807, 3, 82, 35, 0, 806, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 811, 3, 80, 34, 0, 811, 169, 1, 0, 0, 0, 812, 813, 3, 168, 78, 
0, 813, 171, 1, 0, 0, 0, 814, 815, 3, 50, 19, 0, 815, 816, 1, 0, 0, 0, 816, 817, 6, 80, 9, 0, 817, 173, 1, 0, 0, 0, 818, 819, 3, 52, 20, 0, 819, 820, 1, 0, 0, 0, 820, 821, 6, 81, 9, 0, 821, 175, 1, 0, 0, 0, 822, 823, 3, 54, 21, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 82, 9, 0, 825, 177, 1, 0, 0, 0, 826, 827, 3, 66, 27, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 83, 12, 0, 829, 830, 6, 83, 13, 0, 830, 179, 1, 0, 0, 0, 831, 832, 3, 162, 75, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 84, 10, 0, 834, 181, 1, 0, 0, 0, 835, 836, 3, 164, 76, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 85, 14, 0, 838, 183, 1, 0, 0, 0, 839, 840, 3, 102, 45, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 86, 15, 0, 842, 185, 1, 0, 0, 0, 843, 844, 3, 100, 44, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 87, 16, 0, 846, 187, 1, 0, 0, 0, 847, 848, 5, 109, 0, 0, 848, 849, 5, 101, 0, 0, 849, 850, 5, 116, 0, 0, 850, 851, 5, 97, 0, 0, 851, 852, 5, 100, 0, 0, 852, 853, 5, 97, 0, 0, 853, 854, 5, 116, 0, 0, 854, 855, 5, 97, 0, 0, 855, 189, 1, 0, 0, 0, 856, 860, 8, 10, 0, 0, 857, 858, 5, 47, 0, 0, 858, 860, 8, 11, 0, 0, 859, 856, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 860, 191, 1, 0, 0, 0, 861, 863, 3, 190, 89, 0, 862, 861, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 862, 1, 0, 0, 0, 864, 865, 1, 0, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 3, 170, 79, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 91, 17, 0, 869, 195, 1, 0, 0, 0, 870, 871, 3, 50, 19, 0, 871, 872, 1, 0, 0, 0, 872, 873, 6, 92, 9, 0, 873, 197, 1, 0, 0, 0, 874, 875, 3, 52, 20, 0, 875, 876, 1, 0, 0, 0, 876, 877, 6, 93, 9, 0, 877, 199, 1, 0, 0, 0, 878, 879, 3, 54, 21, 0, 879, 880, 1, 0, 0, 0, 880, 881, 6, 94, 9, 0, 881, 201, 1, 0, 0, 0, 882, 883, 3, 66, 27, 0, 883, 884, 1, 0, 0, 0, 884, 885, 6, 95, 12, 0, 885, 886, 6, 95, 13, 0, 886, 203, 1, 0, 0, 0, 887, 888, 3, 106, 47, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 96, 18, 0, 890, 205, 1, 0, 0, 0, 891, 892, 3, 102, 45, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 97, 15, 0, 894, 207, 1, 0, 0, 0, 895, 900, 3, 70, 29, 0, 896, 900, 3, 68, 28, 0, 897, 900, 3, 84, 
36, 0, 898, 900, 3, 156, 72, 0, 899, 895, 1, 0, 0, 0, 899, 896, 1, 0, 0, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 209, 1, 0, 0, 0, 901, 904, 3, 70, 29, 0, 902, 904, 3, 156, 72, 0, 903, 901, 1, 0, 0, 0, 903, 902, 1, 0, 0, 0, 904, 908, 1, 0, 0, 0, 905, 907, 3, 208, 98, 0, 906, 905, 1, 0, 0, 0, 907, 910, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 921, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 911, 914, 3, 84, 36, 0, 912, 914, 3, 78, 33, 0, 913, 911, 1, 0, 0, 0, 913, 912, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 917, 3, 208, 98, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 921, 1, 0, 0, 0, 920, 903, 1, 0, 0, 0, 920, 913, 1, 0, 0, 0, 921, 211, 1, 0, 0, 0, 922, 925, 3, 210, 99, 0, 923, 925, 3, 168, 78, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 213, 1, 0, 0, 0, 928, 929, 3, 50, 19, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 101, 9, 0, 931, 215, 1, 0, 0, 0, 932, 933, 3, 52, 20, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 102, 9, 0, 935, 217, 1, 0, 0, 0, 936, 937, 3, 54, 21, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 103, 9, 0, 939, 219, 1, 0, 0, 0, 940, 941, 3, 66, 27, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 104, 12, 0, 943, 944, 6, 104, 13, 0, 944, 221, 1, 0, 0, 0, 945, 946, 3, 100, 44, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 105, 16, 0, 948, 223, 1, 0, 0, 0, 949, 950, 3, 102, 45, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 106, 15, 0, 952, 225, 1, 0, 0, 0, 953, 954, 3, 106, 47, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 107, 18, 0, 956, 227, 1, 0, 0, 0, 957, 958, 5, 97, 0, 0, 958, 959, 5, 115, 0, 0, 959, 229, 1, 0, 0, 0, 960, 961, 3, 212, 100, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 109, 19, 0, 963, 231, 1, 0, 0, 0, 964, 965, 3, 50, 19, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 110, 9, 0, 967, 233, 1, 0, 0, 0, 968, 969, 3, 52, 20, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 111, 9, 0, 971, 235, 1, 0, 0, 0, 972, 973, 3, 54, 21, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 112, 
9, 0, 975, 237, 1, 0, 0, 0, 976, 977, 3, 66, 27, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 113, 12, 0, 979, 980, 6, 113, 13, 0, 980, 239, 1, 0, 0, 0, 981, 982, 3, 162, 75, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 114, 10, 0, 984, 985, 6, 114, 20, 0, 985, 241, 1, 0, 0, 0, 986, 987, 5, 111, 0, 0, 987, 988, 5, 110, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 115, 21, 0, 990, 243, 1, 0, 0, 0, 991, 992, 5, 119, 0, 0, 992, 993, 5, 105, 0, 0, 993, 994, 5, 116, 0, 0, 994, 995, 5, 104, 0, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 116, 21, 0, 997, 245, 1, 0, 0, 0, 998, 999, 8, 12, 0, 0, 999, 247, 1, 0, 0, 0, 1000, 1002, 3, 246, 117, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 3, 316, 152, 0, 1006, 1008, 1, 0, 0, 0, 1007, 1001, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1010, 1, 0, 0, 0, 1009, 1011, 3, 246, 117, 0, 1010, 1009, 1, 0, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1010, 1, 0, 0, 0, 1012, 1013, 1, 0, 0, 0, 1013, 249, 1, 0, 0, 0, 1014, 1015, 3, 170, 79, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 119, 17, 0, 1017, 251, 1, 0, 0, 0, 1018, 1019, 3, 248, 118, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 120, 22, 0, 1021, 253, 1, 0, 0, 0, 1022, 1023, 3, 50, 19, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 121, 9, 0, 1025, 255, 1, 0, 0, 0, 1026, 1027, 3, 52, 20, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 122, 9, 0, 1029, 257, 1, 0, 0, 0, 1030, 1031, 3, 54, 21, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 123, 9, 0, 1033, 259, 1, 0, 0, 0, 1034, 1035, 3, 66, 27, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 124, 12, 0, 1037, 1038, 6, 124, 13, 0, 1038, 1039, 6, 124, 13, 0, 1039, 261, 1, 0, 0, 0, 1040, 1041, 3, 100, 44, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 125, 16, 0, 1043, 263, 1, 0, 0, 0, 1044, 1045, 3, 102, 45, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 126, 15, 0, 1047, 265, 1, 0, 0, 0, 1048, 1049, 3, 106, 47, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 127, 18, 0, 1051, 267, 1, 0, 0, 0, 1052, 1053, 3, 244, 116, 0, 
1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 128, 23, 0, 1055, 269, 1, 0, 0, 0, 1056, 1057, 3, 212, 100, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 129, 19, 0, 1059, 271, 1, 0, 0, 0, 1060, 1061, 3, 170, 79, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 130, 17, 0, 1063, 273, 1, 0, 0, 0, 1064, 1065, 3, 50, 19, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 131, 9, 0, 1067, 275, 1, 0, 0, 0, 1068, 1069, 3, 52, 20, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 132, 9, 0, 1071, 277, 1, 0, 0, 0, 1072, 1073, 3, 54, 21, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 133, 9, 0, 1075, 279, 1, 0, 0, 0, 1076, 1077, 3, 66, 27, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 134, 12, 0, 1079, 1080, 6, 134, 13, 0, 1080, 281, 1, 0, 0, 0, 1081, 1082, 3, 106, 47, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 135, 18, 0, 1084, 283, 1, 0, 0, 0, 1085, 1086, 3, 170, 79, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 136, 17, 0, 1088, 285, 1, 0, 0, 0, 1089, 1090, 3, 166, 77, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 137, 24, 0, 1092, 287, 1, 0, 0, 0, 1093, 1094, 3, 50, 19, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 138, 9, 0, 1096, 289, 1, 0, 0, 0, 1097, 1098, 3, 52, 20, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 139, 9, 0, 1100, 291, 1, 0, 0, 0, 1101, 1102, 3, 54, 21, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 140, 9, 0, 1104, 293, 1, 0, 0, 0, 1105, 1106, 3, 66, 27, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 141, 12, 0, 1108, 1109, 6, 141, 13, 0, 1109, 295, 1, 0, 0, 0, 1110, 1111, 5, 105, 0, 0, 1111, 1112, 5, 110, 0, 0, 1112, 1113, 5, 102, 0, 0, 1113, 1114, 5, 111, 0, 0, 1114, 297, 1, 0, 0, 0, 1115, 1116, 3, 50, 19, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 143, 9, 0, 1118, 299, 1, 0, 0, 0, 1119, 1120, 3, 52, 20, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 144, 9, 0, 1122, 301, 1, 0, 0, 0, 1123, 1124, 3, 54, 21, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 145, 9, 0, 1126, 303, 1, 0, 0, 0, 1127, 1128, 3, 66, 27, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 146, 12, 0, 1130, 1131, 6, 146, 13, 0, 1131, 305, 1, 0, 0, 0, 1132, 1133, 5, 102, 
0, 0, 1133, 1134, 5, 117, 0, 0, 1134, 1135, 5, 110, 0, 0, 1135, 1136, 5, 99, 0, 0, 1136, 1137, 5, 116, 0, 0, 1137, 1138, 5, 105, 0, 0, 1138, 1139, 5, 111, 0, 0, 1139, 1140, 5, 110, 0, 0, 1140, 1141, 5, 115, 0, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 50, 19, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 9, 0, 1145, 309, 1, 0, 0, 0, 1146, 1147, 3, 52, 20, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 149, 9, 0, 1149, 311, 1, 0, 0, 0, 1150, 1151, 3, 54, 21, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 150, 9, 0, 1153, 313, 1, 0, 0, 0, 1154, 1155, 3, 164, 76, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 151, 14, 0, 1157, 1158, 6, 151, 13, 0, 1158, 315, 1, 0, 0, 0, 1159, 1160, 5, 58, 0, 0, 1160, 317, 1, 0, 0, 0, 1161, 1167, 3, 78, 33, 0, 1162, 1167, 3, 68, 28, 0, 1163, 1167, 3, 106, 47, 0, 1164, 1167, 3, 70, 29, 0, 1165, 1167, 3, 84, 36, 0, 1166, 1161, 1, 0, 0, 0, 1166, 1162, 1, 0, 0, 0, 1166, 1163, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1165, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 319, 1, 0, 0, 0, 1170, 1171, 3, 50, 19, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 154, 9, 0, 1173, 321, 1, 0, 0, 0, 1174, 1175, 3, 52, 20, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 155, 9, 0, 1177, 323, 1, 0, 0, 0, 1178, 1179, 3, 54, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 156, 9, 0, 1181, 325, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 479, 489, 493, 496, 505, 507, 518, 559, 564, 573, 580, 585, 587, 598, 606, 609, 611, 616, 621, 627, 634, 639, 645, 648, 656, 660, 790, 795, 800, 802, 808, 859, 864, 899, 903, 908, 913, 918, 920, 924, 926, 1003, 1007, 1012, 1166, 1168, 25, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 67, 0, 7, 36, 0, 7, 76, 0, 5, 11, 0, 5, 7, 0, 7, 86, 0, 7, 85, 0, 7, 66, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 02ba5f7caacde..3107ec6259dbc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -18,28 +18,30 @@ public class EsqlBaseLexer extends Lexer { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21, - EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24, - PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38, - LP=39, IN=40, IS=41, LIKE=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, - RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, - GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, - CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, - EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, ID_PATTERN=75, - PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, - AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, - ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, - ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, - MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, - SETTING_WS=104; + KEEP=9, 
LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, + STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, + WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, + PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, + ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, + LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, + RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, + GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, + FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, + PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, + AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, + ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, + ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, + ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, + MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, + META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10; + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, + SETTING_MODE=11; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -47,25 +49,25 @@ public class EsqlBaseLexer extends Lexer { public static String[] modeNames 
= { "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "SETTING_MODE" + "META_MODE", "SETTING_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", - "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", - "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", - "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", - "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", + "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", + "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", + "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", + "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", 
"PARAM", + "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", + "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", + "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", + "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", @@ -79,9 +81,10 @@ private static String[] makeRuleNames() { "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", + "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } public static final String[] ruleNames = makeRuleNames(); @@ -89,25 +92,25 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'", - "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, - null, null, null, null, "'|'", null, null, null, "'by'", "'and'", 
"'asc'", - "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", - "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", - "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, - null, null, "'metadata'", null, null, null, null, null, null, null, null, - "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, - null, null, null, null, null, "'info'", "'functions'", null, null, null, - "':'" + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", + "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", - "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", + "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", 
"ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", @@ -121,8 +124,9 @@ private static String[] makeSymbolicNames() { "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", + "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } @@ -186,727 +190,749 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000h\u047b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000l\u049e\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ - "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ - "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ - "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ - "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ - "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ - "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ - "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ - "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ - 
"\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ - "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ - "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ - "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ - "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ - "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ - "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ - "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ - "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ - "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ - "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ - "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ - "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ - "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ - "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ - "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ - "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ - "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ - "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ - "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ - "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ - "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ - "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ - "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ - "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ - "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ - 
"\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ - "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ - "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0004\u0011\u01cc\b\u0011\u000b\u0011"+ - 
"\f\u0011\u01cd\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0005\u0012\u01d6\b\u0012\n\u0012\f\u0012\u01d9\t\u0012\u0001"+ - "\u0012\u0003\u0012\u01dc\b\u0012\u0001\u0012\u0003\u0012\u01df\b\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ + "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ + "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ + "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ + "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ + "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ + "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ + "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ + "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ + "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ + "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ + "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ + "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ + "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ + "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ + "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ + "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ + "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ + "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ + 
"U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ + "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ + "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ + "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ + "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ + "n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ + "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ + "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ + "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ + "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ + "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ + "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ + "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ + "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ + "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ + "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ + "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ + "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ + "\u0002\u009c\u0007\u009c\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ + 
"\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ + "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01de\b\u0012\u000b\u0012\f"+ + "\u0012\u01df\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ "\u0001\u0013\u0005\u0013\u01e8\b\u0013\n\u0013\f\u0013\u01eb\t\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0004"+ - "\u0014\u01f3\b\u0014\u000b\u0014\f\u0014\u01f4\u0001\u0014\u0001\u0014"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a"+ - 
"\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ - "\u0001\u001f\u0003\u001f\u021e\b\u001f\u0001\u001f\u0004\u001f\u0221\b"+ - "\u001f\u000b\u001f\f\u001f\u0222\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001"+ - "\"\u0001\"\u0003\"\u022c\b\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003"+ - "$\u0233\b$\u0001%\u0001%\u0001%\u0005%\u0238\b%\n%\f%\u023b\t%\u0001%"+ - "\u0001%\u0001%\u0001%\u0001%\u0001%\u0005%\u0243\b%\n%\f%\u0246\t%\u0001"+ - "%\u0001%\u0001%\u0001%\u0001%\u0003%\u024d\b%\u0001%\u0003%\u0250\b%\u0003"+ - "%\u0252\b%\u0001&\u0004&\u0255\b&\u000b&\f&\u0256\u0001\'\u0004\'\u025a"+ - "\b\'\u000b\'\f\'\u025b\u0001\'\u0001\'\u0005\'\u0260\b\'\n\'\f\'\u0263"+ - "\t\'\u0001\'\u0001\'\u0004\'\u0267\b\'\u000b\'\f\'\u0268\u0001\'\u0004"+ - "\'\u026c\b\'\u000b\'\f\'\u026d\u0001\'\u0001\'\u0005\'\u0272\b\'\n\'\f"+ - "\'\u0275\t\'\u0003\'\u0277\b\'\u0001\'\u0001\'\u0001\'\u0001\'\u0004\'"+ - "\u027d\b\'\u000b\'\f\'\u027e\u0001\'\u0001\'\u0003\'\u0283\b\'\u0001("+ - "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ - "*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u0001"+ - "2\u00013\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00015\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00018\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001"+ - "C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001"+ - "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001J\u0001K\u0001"+ - 
"K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005L\u0303\bL\nL\fL\u0306\tL\u0001"+ - "L\u0001L\u0003L\u030a\bL\u0001L\u0004L\u030d\bL\u000bL\fL\u030e\u0003"+ - "L\u0311\bL\u0001M\u0001M\u0004M\u0315\bM\u000bM\fM\u0316\u0001M\u0001"+ - "M\u0001N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ - "P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001"+ - "S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001"+ - "U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001"+ - "W\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0003X\u034a\bX\u0001"+ - "Y\u0004Y\u034d\bY\u000bY\fY\u034e\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ - "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001"+ - "]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ - "`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0003a\u0372\ba\u0001"+ - "b\u0001b\u0003b\u0376\bb\u0001b\u0005b\u0379\bb\nb\fb\u037c\tb\u0001b"+ - "\u0001b\u0003b\u0380\bb\u0001b\u0004b\u0383\bb\u000bb\fb\u0384\u0003b"+ - "\u0387\bb\u0001c\u0001c\u0004c\u038b\bc\u000bc\fc\u038c\u0001d\u0001d"+ - "\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001"+ - "f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ - "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001"+ - "n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001"+ - "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ - "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ - "t\u0001u\u0004u\u03d8\bu\u000bu\fu\u03d9\u0001u\u0001u\u0003u\u03de\b"+ - "u\u0001u\u0004u\u03e1\bu\u000bu\fu\u03e2\u0001v\u0001v\u0001v\u0001v\u0001"+ - "w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ - "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ - 
"{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ - "~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ - "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ - "\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0013\u0003\u0013\u01ee\b\u0013\u0001\u0013\u0003\u0013\u01f1\b\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0005\u0014\u01fa\b\u0014\n\u0014\f\u0014\u01fd\t\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ + "\u0015\u0205\b\u0015\u000b\u0015\f\u0015\u0206\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ + " \u0003 \u0230\b \u0001 \u0004 \u0233\b \u000b \f \u0234\u0001!\u0001"+ + "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u023e\b#\u0001$\u0001$\u0001"+ + "%\u0001%\u0001%\u0003%\u0245\b%\u0001&\u0001&\u0001&\u0005&\u024a\b&\n"+ + 
"&\f&\u024d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u0255\b"+ + "&\n&\f&\u0258\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u025f\b&\u0001"+ + "&\u0003&\u0262\b&\u0003&\u0264\b&\u0001\'\u0004\'\u0267\b\'\u000b\'\f"+ + "\'\u0268\u0001(\u0004(\u026c\b(\u000b(\f(\u026d\u0001(\u0001(\u0005(\u0272"+ + "\b(\n(\f(\u0275\t(\u0001(\u0001(\u0004(\u0279\b(\u000b(\f(\u027a\u0001"+ + "(\u0004(\u027e\b(\u000b(\f(\u027f\u0001(\u0001(\u0005(\u0284\b(\n(\f("+ + "\u0287\t(\u0003(\u0289\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u028f\b("+ + "\u000b(\f(\u0290\u0001(\u0001(\u0003(\u0295\b(\u0001)\u0001)\u0001)\u0001"+ + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u0001"+ + "0\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u0001"+ + "1\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u0001"+ + "4\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u0001"+ + "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ + "9\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001<\u0001"+ + "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001"+ + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ + "A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001"+ + "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ + "J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001"+ + "L\u0001M\u0001M\u0005M\u0315\bM\nM\fM\u0318\tM\u0001M\u0001M\u0003M\u031c"+ + "\bM\u0001M\u0004M\u031f\bM\u000bM\fM\u0320\u0003M\u0323\bM\u0001N\u0001"+ + "N\u0004N\u0327\bN\u000bN\fN\u0328\u0001N\u0001N\u0001O\u0001O\u0001P\u0001"+ + "P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ + "R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001"+ + "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ + 
"W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0003Y\u035c\bY\u0001Z\u0004Z\u035f\bZ\u000bZ\f"+ + "Z\u0360\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ + "]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0003b\u0384\bb\u0001c\u0001c\u0003c\u0388"+ + "\bc\u0001c\u0005c\u038b\bc\nc\fc\u038e\tc\u0001c\u0001c\u0003c\u0392\b"+ + "c\u0001c\u0004c\u0395\bc\u000bc\fc\u0396\u0003c\u0399\bc\u0001d\u0001"+ + "d\u0004d\u039d\bd\u000bd\fd\u039e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001"+ + "f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ + "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ + "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0004v\u03ea\bv\u000b"+ + "v\fv\u03eb\u0001v\u0001v\u0003v\u03f0\bv\u0001v\u0004v\u03f3\bv\u000b"+ + "v\fv\u03f4\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001"+ + "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ + "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ + 
"\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088"+ + "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ + "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f"+ - "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0094\u0004\u0094\u046c\b\u0094\u000b\u0094"+ - "\f\u0094\u046d\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097"+ - "\u0001\u0097\u0002\u01e9\u0244\u0000\u0098\u000b\u0001\r\u0002\u000f\u0003"+ - "\u0011\u0004\u0013\u0005\u0015\u0006\u0017\u0007\u0019\b\u001b\t\u001d"+ - "\n\u001f\u000b!\f#\r%\u000e\'\u000f)\u0010+\u0011-\u0012/\u00131\u0014"+ - "3\u00155\u00007\u00009\u0016;\u0017=\u0018?\u0019A\u0000C\u0000E\u0000"+ - "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0000U\u001aW\u001bY\u001c"+ - "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ - "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ - ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5\u0000\u00a7B\u00a9C"+ - "\u00abD\u00adE\u00af\u0000\u00b1\u0000\u00b3\u0000\u00b5\u0000\u00b7\u0000"+ - "\u00b9F\u00bb\u0000\u00bdG\u00bf\u0000\u00c1H\u00c3I\u00c5J\u00c7\u0000"+ - "\u00c9\u0000\u00cb\u0000\u00cd\u0000\u00cf\u0000\u00d1K\u00d3L\u00d5M"+ - "\u00d7N\u00d9\u0000\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1O\u00e3\u0000"+ - 
"\u00e5P\u00e7Q\u00e9R\u00eb\u0000\u00ed\u0000\u00efS\u00f1T\u00f3\u0000"+ - "\u00f5U\u00f7\u0000\u00f9\u0000\u00fbV\u00fdW\u00ffX\u0101\u0000\u0103"+ - "\u0000\u0105\u0000\u0107\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f"+ - "Y\u0111Z\u0113[\u0115\u0000\u0117\u0000\u0119\u0000\u011b\u0000\u011d"+ - "\\\u011f]\u0121^\u0123\u0000\u0125_\u0127`\u0129a\u012bb\u012dc\u012f"+ - "\u0000\u0131d\u0133e\u0135f\u0137g\u0139h\u000b\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000"+ - "\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ - "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ - "++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b"+ - "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0497\u0000\u000b\u0001\u0000\u0000"+ - "\u0000\u0000\r\u0001\u0000\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - "\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00015\u0001\u0000\u0000\u0000\u00017\u0001"+ - "\u0000\u0000\u0000\u00019\u0001\u0000\u0000\u0000\u0001;\u0001\u0000\u0000"+ - "\u0000\u0001=\u0001\u0000\u0000\u0000\u0002?\u0001\u0000\u0000\u0000\u0002"+ - "U\u0001\u0000\u0000\u0000\u0002W\u0001\u0000\u0000\u0000\u0002Y\u0001"+ - "\u0000\u0000\u0000\u0002[\u0001\u0000\u0000\u0000\u0002]\u0001\u0000\u0000"+ - 
"\u0000\u0002_\u0001\u0000\u0000\u0000\u0002a\u0001\u0000\u0000\u0000\u0002"+ - "c\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000\u0000\u0002g\u0001"+ - "\u0000\u0000\u0000\u0002i\u0001\u0000\u0000\u0000\u0002k\u0001\u0000\u0000"+ - "\u0000\u0002m\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0002"+ - "q\u0001\u0000\u0000\u0000\u0002s\u0001\u0000\u0000\u0000\u0002u\u0001"+ - "\u0000\u0000\u0000\u0002w\u0001\u0000\u0000\u0000\u0002y\u0001\u0000\u0000"+ - "\u0000\u0002{\u0001\u0000\u0000\u0000\u0002}\u0001\u0000\u0000\u0000\u0002"+ - "\u007f\u0001\u0000\u0000\u0000\u0002\u0081\u0001\u0000\u0000\u0000\u0002"+ - "\u0083\u0001\u0000\u0000\u0000\u0002\u0085\u0001\u0000\u0000\u0000\u0002"+ - "\u0087\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000\u0000\u0000\u0002"+ - "\u008b\u0001\u0000\u0000\u0000\u0002\u008d\u0001\u0000\u0000\u0000\u0002"+ - "\u008f\u0001\u0000\u0000\u0000\u0002\u0091\u0001\u0000\u0000\u0000\u0002"+ - "\u0093\u0001\u0000\u0000\u0000\u0002\u0095\u0001\u0000\u0000\u0000\u0002"+ - "\u0097\u0001\u0000\u0000\u0000\u0002\u0099\u0001\u0000\u0000\u0000\u0002"+ - "\u009b\u0001\u0000\u0000\u0000\u0002\u009d\u0001\u0000\u0000\u0000\u0002"+ - "\u009f\u0001\u0000\u0000\u0000\u0002\u00a1\u0001\u0000\u0000\u0000\u0002"+ - "\u00a3\u0001\u0000\u0000\u0000\u0002\u00a7\u0001\u0000\u0000\u0000\u0002"+ - "\u00a9\u0001\u0000\u0000\u0000\u0002\u00ab\u0001\u0000\u0000\u0000\u0002"+ - "\u00ad\u0001\u0000\u0000\u0000\u0003\u00af\u0001\u0000\u0000\u0000\u0003"+ - "\u00b1\u0001\u0000\u0000\u0000\u0003\u00b3\u0001\u0000\u0000\u0000\u0003"+ - "\u00b5\u0001\u0000\u0000\u0000\u0003\u00b7\u0001\u0000\u0000\u0000\u0003"+ - "\u00b9\u0001\u0000\u0000\u0000\u0003\u00bd\u0001\u0000\u0000\u0000\u0003"+ - "\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003"+ - "\u00c3\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0004"+ - "\u00c7\u0001\u0000\u0000\u0000\u0004\u00c9\u0001\u0000\u0000\u0000\u0004"+ - 
"\u00cb\u0001\u0000\u0000\u0000\u0004\u00d1\u0001\u0000\u0000\u0000\u0004"+ - "\u00d3\u0001\u0000\u0000\u0000\u0004\u00d5\u0001\u0000\u0000\u0000\u0004"+ - "\u00d7\u0001\u0000\u0000\u0000\u0005\u00d9\u0001\u0000\u0000\u0000\u0005"+ - "\u00db\u0001\u0000\u0000\u0000\u0005\u00dd\u0001\u0000\u0000\u0000\u0005"+ - "\u00df\u0001\u0000\u0000\u0000\u0005\u00e1\u0001\u0000\u0000\u0000\u0005"+ - "\u00e3\u0001\u0000\u0000\u0000\u0005\u00e5\u0001\u0000\u0000\u0000\u0005"+ - "\u00e7\u0001\u0000\u0000\u0000\u0005\u00e9\u0001\u0000\u0000\u0000\u0006"+ - "\u00eb\u0001\u0000\u0000\u0000\u0006\u00ed\u0001\u0000\u0000\u0000\u0006"+ - "\u00ef\u0001\u0000\u0000\u0000\u0006\u00f1\u0001\u0000\u0000\u0000\u0006"+ - "\u00f5\u0001\u0000\u0000\u0000\u0006\u00f7\u0001\u0000\u0000\u0000\u0006"+ - "\u00f9\u0001\u0000\u0000\u0000\u0006\u00fb\u0001\u0000\u0000\u0000\u0006"+ - "\u00fd\u0001\u0000\u0000\u0000\u0006\u00ff\u0001\u0000\u0000\u0000\u0007"+ - "\u0101\u0001\u0000\u0000\u0000\u0007\u0103\u0001\u0000\u0000\u0000\u0007"+ - "\u0105\u0001\u0000\u0000\u0000\u0007\u0107\u0001\u0000\u0000\u0000\u0007"+ - "\u0109\u0001\u0000\u0000\u0000\u0007\u010b\u0001\u0000\u0000\u0000\u0007"+ - "\u010d\u0001\u0000\u0000\u0000\u0007\u010f\u0001\u0000\u0000\u0000\u0007"+ - "\u0111\u0001\u0000\u0000\u0000\u0007\u0113\u0001\u0000\u0000\u0000\b\u0115"+ - "\u0001\u0000\u0000\u0000\b\u0117\u0001\u0000\u0000\u0000\b\u0119\u0001"+ - "\u0000\u0000\u0000\b\u011b\u0001\u0000\u0000\u0000\b\u011d\u0001\u0000"+ - "\u0000\u0000\b\u011f\u0001\u0000\u0000\u0000\b\u0121\u0001\u0000\u0000"+ - "\u0000\t\u0123\u0001\u0000\u0000\u0000\t\u0125\u0001\u0000\u0000\u0000"+ - "\t\u0127\u0001\u0000\u0000\u0000\t\u0129\u0001\u0000\u0000\u0000\t\u012b"+ - "\u0001\u0000\u0000\u0000\t\u012d\u0001\u0000\u0000\u0000\n\u012f\u0001"+ - "\u0000\u0000\u0000\n\u0131\u0001\u0000\u0000\u0000\n\u0133\u0001\u0000"+ - "\u0000\u0000\n\u0135\u0001\u0000\u0000\u0000\n\u0137\u0001\u0000\u0000"+ - 
"\u0000\n\u0139\u0001\u0000\u0000\u0000\u000b\u013b\u0001\u0000\u0000\u0000"+ - "\r\u0145\u0001\u0000\u0000\u0000\u000f\u014c\u0001\u0000\u0000\u0000\u0011"+ - "\u0155\u0001\u0000\u0000\u0000\u0013\u015c\u0001\u0000\u0000\u0000\u0015"+ - "\u0166\u0001\u0000\u0000\u0000\u0017\u016d\u0001\u0000\u0000\u0000\u0019"+ - "\u0174\u0001\u0000\u0000\u0000\u001b\u0182\u0001\u0000\u0000\u0000\u001d"+ - "\u0189\u0001\u0000\u0000\u0000\u001f\u0191\u0001\u0000\u0000\u0000!\u019d"+ - "\u0001\u0000\u0000\u0000#\u01a6\u0001\u0000\u0000\u0000%\u01ac\u0001\u0000"+ - "\u0000\u0000\'\u01b3\u0001\u0000\u0000\u0000)\u01ba\u0001\u0000\u0000"+ - "\u0000+\u01c2\u0001\u0000\u0000\u0000-\u01cb\u0001\u0000\u0000\u0000/"+ - "\u01d1\u0001\u0000\u0000\u00001\u01e2\u0001\u0000\u0000\u00003\u01f2\u0001"+ - "\u0000\u0000\u00005\u01f8\u0001\u0000\u0000\u00007\u01fd\u0001\u0000\u0000"+ - "\u00009\u0202\u0001\u0000\u0000\u0000;\u0206\u0001\u0000\u0000\u0000="+ - "\u020a\u0001\u0000\u0000\u0000?\u020e\u0001\u0000\u0000\u0000A\u0212\u0001"+ - "\u0000\u0000\u0000C\u0214\u0001\u0000\u0000\u0000E\u0216\u0001\u0000\u0000"+ - "\u0000G\u0219\u0001\u0000\u0000\u0000I\u021b\u0001\u0000\u0000\u0000K"+ - "\u0224\u0001\u0000\u0000\u0000M\u0226\u0001\u0000\u0000\u0000O\u022b\u0001"+ - "\u0000\u0000\u0000Q\u022d\u0001\u0000\u0000\u0000S\u0232\u0001\u0000\u0000"+ - "\u0000U\u0251\u0001\u0000\u0000\u0000W\u0254\u0001\u0000\u0000\u0000Y"+ - "\u0282\u0001\u0000\u0000\u0000[\u0284\u0001\u0000\u0000\u0000]\u0287\u0001"+ - "\u0000\u0000\u0000_\u028b\u0001\u0000\u0000\u0000a\u028f\u0001\u0000\u0000"+ - "\u0000c\u0291\u0001\u0000\u0000\u0000e\u0293\u0001\u0000\u0000\u0000g"+ - "\u0298\u0001\u0000\u0000\u0000i\u029a\u0001\u0000\u0000\u0000k\u02a0\u0001"+ - "\u0000\u0000\u0000m\u02a6\u0001\u0000\u0000\u0000o\u02ab\u0001\u0000\u0000"+ - "\u0000q\u02ad\u0001\u0000\u0000\u0000s\u02b0\u0001\u0000\u0000\u0000u"+ - "\u02b3\u0001\u0000\u0000\u0000w\u02b8\u0001\u0000\u0000\u0000y\u02bc\u0001"+ - 
"\u0000\u0000\u0000{\u02c1\u0001\u0000\u0000\u0000}\u02c7\u0001\u0000\u0000"+ - "\u0000\u007f\u02ca\u0001\u0000\u0000\u0000\u0081\u02cc\u0001\u0000\u0000"+ - "\u0000\u0083\u02d2\u0001\u0000\u0000\u0000\u0085\u02d4\u0001\u0000\u0000"+ - "\u0000\u0087\u02d9\u0001\u0000\u0000\u0000\u0089\u02dc\u0001\u0000\u0000"+ - "\u0000\u008b\u02df\u0001\u0000\u0000\u0000\u008d\u02e2\u0001\u0000\u0000"+ - "\u0000\u008f\u02e4\u0001\u0000\u0000\u0000\u0091\u02e7\u0001\u0000\u0000"+ - "\u0000\u0093\u02e9\u0001\u0000\u0000\u0000\u0095\u02ec\u0001\u0000\u0000"+ - "\u0000\u0097\u02ee\u0001\u0000\u0000\u0000\u0099\u02f0\u0001\u0000\u0000"+ - "\u0000\u009b\u02f2\u0001\u0000\u0000\u0000\u009d\u02f4\u0001\u0000\u0000"+ - "\u0000\u009f\u02f6\u0001\u0000\u0000\u0000\u00a1\u02fb\u0001\u0000\u0000"+ - "\u0000\u00a3\u0310\u0001\u0000\u0000\u0000\u00a5\u0312\u0001\u0000\u0000"+ - "\u0000\u00a7\u031a\u0001\u0000\u0000\u0000\u00a9\u031c\u0001\u0000\u0000"+ - "\u0000\u00ab\u0320\u0001\u0000\u0000\u0000\u00ad\u0324\u0001\u0000\u0000"+ - "\u0000\u00af\u0328\u0001\u0000\u0000\u0000\u00b1\u032d\u0001\u0000\u0000"+ - "\u0000\u00b3\u0331\u0001\u0000\u0000\u0000\u00b5\u0335\u0001\u0000\u0000"+ - "\u0000\u00b7\u0339\u0001\u0000\u0000\u0000\u00b9\u033d\u0001\u0000\u0000"+ - "\u0000\u00bb\u0349\u0001\u0000\u0000\u0000\u00bd\u034c\u0001\u0000\u0000"+ - "\u0000\u00bf\u0350\u0001\u0000\u0000\u0000\u00c1\u0354\u0001\u0000\u0000"+ - "\u0000\u00c3\u0358\u0001\u0000\u0000\u0000\u00c5\u035c\u0001\u0000\u0000"+ - "\u0000\u00c7\u0360\u0001\u0000\u0000\u0000\u00c9\u0365\u0001\u0000\u0000"+ - "\u0000\u00cb\u0369\u0001\u0000\u0000\u0000\u00cd\u0371\u0001\u0000\u0000"+ - "\u0000\u00cf\u0386\u0001\u0000\u0000\u0000\u00d1\u038a\u0001\u0000\u0000"+ - "\u0000\u00d3\u038e\u0001\u0000\u0000\u0000\u00d5\u0392\u0001\u0000\u0000"+ - "\u0000\u00d7\u0396\u0001\u0000\u0000\u0000\u00d9\u039a\u0001\u0000\u0000"+ - "\u0000\u00db\u039f\u0001\u0000\u0000\u0000\u00dd\u03a3\u0001\u0000\u0000"+ - 
"\u0000\u00df\u03a7\u0001\u0000\u0000\u0000\u00e1\u03ab\u0001\u0000\u0000"+ - "\u0000\u00e3\u03ae\u0001\u0000\u0000\u0000\u00e5\u03b2\u0001\u0000\u0000"+ - "\u0000\u00e7\u03b6\u0001\u0000\u0000\u0000\u00e9\u03ba\u0001\u0000\u0000"+ - "\u0000\u00eb\u03be\u0001\u0000\u0000\u0000\u00ed\u03c3\u0001\u0000\u0000"+ - "\u0000\u00ef\u03c8\u0001\u0000\u0000\u0000\u00f1\u03cd\u0001\u0000\u0000"+ - "\u0000\u00f3\u03d4\u0001\u0000\u0000\u0000\u00f5\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f7\u03e4\u0001\u0000\u0000\u0000\u00f9\u03e8\u0001\u0000\u0000"+ - "\u0000\u00fb\u03ec\u0001\u0000\u0000\u0000\u00fd\u03f0\u0001\u0000\u0000"+ - "\u0000\u00ff\u03f4\u0001\u0000\u0000\u0000\u0101\u03f8\u0001\u0000\u0000"+ - "\u0000\u0103\u03fe\u0001\u0000\u0000\u0000\u0105\u0402\u0001\u0000\u0000"+ - "\u0000\u0107\u0406\u0001\u0000\u0000\u0000\u0109\u040a\u0001\u0000\u0000"+ - "\u0000\u010b\u040e\u0001\u0000\u0000\u0000\u010d\u0412\u0001\u0000\u0000"+ - "\u0000\u010f\u0416\u0001\u0000\u0000\u0000\u0111\u041a\u0001\u0000\u0000"+ - "\u0000\u0113\u041e\u0001\u0000\u0000\u0000\u0115\u0422\u0001\u0000\u0000"+ - "\u0000\u0117\u0427\u0001\u0000\u0000\u0000\u0119\u042b\u0001\u0000\u0000"+ - "\u0000\u011b\u042f\u0001\u0000\u0000\u0000\u011d\u0433\u0001\u0000\u0000"+ - "\u0000\u011f\u0437\u0001\u0000\u0000\u0000\u0121\u043b\u0001\u0000\u0000"+ - "\u0000\u0123\u043f\u0001\u0000\u0000\u0000\u0125\u0444\u0001\u0000\u0000"+ - "\u0000\u0127\u0449\u0001\u0000\u0000\u0000\u0129\u0453\u0001\u0000\u0000"+ - "\u0000\u012b\u0457\u0001\u0000\u0000\u0000\u012d\u045b\u0001\u0000\u0000"+ - "\u0000\u012f\u045f\u0001\u0000\u0000\u0000\u0131\u0464\u0001\u0000\u0000"+ - "\u0000\u0133\u046b\u0001\u0000\u0000\u0000\u0135\u046f\u0001\u0000\u0000"+ - "\u0000\u0137\u0473\u0001\u0000\u0000\u0000\u0139\u0477\u0001\u0000\u0000"+ - "\u0000\u013b\u013c\u0005d\u0000\u0000\u013c\u013d\u0005i\u0000\u0000\u013d"+ - "\u013e\u0005s\u0000\u0000\u013e\u013f\u0005s\u0000\u0000\u013f\u0140\u0005"+ - 
"e\u0000\u0000\u0140\u0141\u0005c\u0000\u0000\u0141\u0142\u0005t\u0000"+ - "\u0000\u0142\u0143\u0001\u0000\u0000\u0000\u0143\u0144\u0006\u0000\u0000"+ - "\u0000\u0144\f\u0001\u0000\u0000\u0000\u0145\u0146\u0005d\u0000\u0000"+ - "\u0146\u0147\u0005r\u0000\u0000\u0147\u0148\u0005o\u0000\u0000\u0148\u0149"+ - "\u0005p\u0000\u0000\u0149\u014a\u0001\u0000\u0000\u0000\u014a\u014b\u0006"+ - "\u0001\u0001\u0000\u014b\u000e\u0001\u0000\u0000\u0000\u014c\u014d\u0005"+ - "e\u0000\u0000\u014d\u014e\u0005n\u0000\u0000\u014e\u014f\u0005r\u0000"+ - "\u0000\u014f\u0150\u0005i\u0000\u0000\u0150\u0151\u0005c\u0000\u0000\u0151"+ - "\u0152\u0005h\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0154"+ - "\u0006\u0002\u0002\u0000\u0154\u0010\u0001\u0000\u0000\u0000\u0155\u0156"+ - "\u0005e\u0000\u0000\u0156\u0157\u0005v\u0000\u0000\u0157\u0158\u0005a"+ - "\u0000\u0000\u0158\u0159\u0005l\u0000\u0000\u0159\u015a\u0001\u0000\u0000"+ - "\u0000\u015a\u015b\u0006\u0003\u0000\u0000\u015b\u0012\u0001\u0000\u0000"+ - "\u0000\u015c\u015d\u0005e\u0000\u0000\u015d\u015e\u0005x\u0000\u0000\u015e"+ - "\u015f\u0005p\u0000\u0000\u015f\u0160\u0005l\u0000\u0000\u0160\u0161\u0005"+ - "a\u0000\u0000\u0161\u0162\u0005i\u0000\u0000\u0162\u0163\u0005n\u0000"+ - "\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164\u0165\u0006\u0004\u0003"+ - "\u0000\u0165\u0014\u0001\u0000\u0000\u0000\u0166\u0167\u0005f\u0000\u0000"+ - "\u0167\u0168\u0005r\u0000\u0000\u0168\u0169\u0005o\u0000\u0000\u0169\u016a"+ - "\u0005m\u0000\u0000\u016a\u016b\u0001\u0000\u0000\u0000\u016b\u016c\u0006"+ - "\u0005\u0004\u0000\u016c\u0016\u0001\u0000\u0000\u0000\u016d\u016e\u0005"+ - "g\u0000\u0000\u016e\u016f\u0005r\u0000\u0000\u016f\u0170\u0005o\u0000"+ - "\u0000\u0170\u0171\u0005k\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000"+ - "\u0172\u0173\u0006\u0006\u0000\u0000\u0173\u0018\u0001\u0000\u0000\u0000"+ - "\u0174\u0175\u0005i\u0000\u0000\u0175\u0176\u0005n\u0000\u0000\u0176\u0177"+ - 
"\u0005l\u0000\u0000\u0177\u0178\u0005i\u0000\u0000\u0178\u0179\u0005n"+ - "\u0000\u0000\u0179\u017a\u0005e\u0000\u0000\u017a\u017b\u0005s\u0000\u0000"+ - "\u017b\u017c\u0005t\u0000\u0000\u017c\u017d\u0005a\u0000\u0000\u017d\u017e"+ - "\u0005t\u0000\u0000\u017e\u017f\u0005s\u0000\u0000\u017f\u0180\u0001\u0000"+ - "\u0000\u0000\u0180\u0181\u0006\u0007\u0000\u0000\u0181\u001a\u0001\u0000"+ - "\u0000\u0000\u0182\u0183\u0005k\u0000\u0000\u0183\u0184\u0005e\u0000\u0000"+ - "\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005p\u0000\u0000\u0186\u0187"+ - "\u0001\u0000\u0000\u0000\u0187\u0188\u0006\b\u0001\u0000\u0188\u001c\u0001"+ - "\u0000\u0000\u0000\u0189\u018a\u0005l\u0000\u0000\u018a\u018b\u0005i\u0000"+ - "\u0000\u018b\u018c\u0005m\u0000\u0000\u018c\u018d\u0005i\u0000\u0000\u018d"+ - "\u018e\u0005t\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000\u018f\u0190"+ - "\u0006\t\u0000\u0000\u0190\u001e\u0001\u0000\u0000\u0000\u0191\u0192\u0005"+ - "m\u0000\u0000\u0192\u0193\u0005v\u0000\u0000\u0193\u0194\u0005_\u0000"+ - "\u0000\u0194\u0195\u0005e\u0000\u0000\u0195\u0196\u0005x\u0000\u0000\u0196"+ - "\u0197\u0005p\u0000\u0000\u0197\u0198\u0005a\u0000\u0000\u0198\u0199\u0005"+ - "n\u0000\u0000\u0199\u019a\u0005d\u0000\u0000\u019a\u019b\u0001\u0000\u0000"+ - "\u0000\u019b\u019c\u0006\n\u0005\u0000\u019c \u0001\u0000\u0000\u0000"+ - "\u019d\u019e\u0005r\u0000\u0000\u019e\u019f\u0005e\u0000\u0000\u019f\u01a0"+ - "\u0005n\u0000\u0000\u01a0\u01a1\u0005a\u0000\u0000\u01a1\u01a2\u0005m"+ - "\u0000\u0000\u01a2\u01a3\u0005e\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a4\u01a5\u0006\u000b\u0006\u0000\u01a5\"\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a7\u0005r\u0000\u0000\u01a7\u01a8\u0005o\u0000\u0000\u01a8\u01a9"+ - "\u0005w\u0000\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000\u01aa\u01ab\u0006"+ - "\f\u0000\u0000\u01ab$\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005s\u0000"+ - "\u0000\u01ad\u01ae\u0005h\u0000\u0000\u01ae\u01af\u0005o\u0000\u0000\u01af"+ - 
"\u01b0\u0005w\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000\u0000\u01b1\u01b2"+ - "\u0006\r\u0007\u0000\u01b2&\u0001\u0000\u0000\u0000\u01b3\u01b4\u0005"+ - "s\u0000\u0000\u01b4\u01b5\u0005o\u0000\u0000\u01b5\u01b6\u0005r\u0000"+ - "\u0000\u01b6\u01b7\u0005t\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000"+ - "\u01b8\u01b9\u0006\u000e\u0000\u0000\u01b9(\u0001\u0000\u0000\u0000\u01ba"+ - "\u01bb\u0005s\u0000\u0000\u01bb\u01bc\u0005t\u0000\u0000\u01bc\u01bd\u0005"+ - "a\u0000\u0000\u01bd\u01be\u0005t\u0000\u0000\u01be\u01bf\u0005s\u0000"+ - "\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006\u000f\u0000"+ - "\u0000\u01c1*\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005w\u0000\u0000\u01c3"+ - "\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005e\u0000\u0000\u01c5\u01c6\u0005"+ - "r\u0000\u0000\u01c6\u01c7\u0005e\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000"+ - "\u0000\u01c8\u01c9\u0006\u0010\u0000\u0000\u01c9,\u0001\u0000\u0000\u0000"+ - "\u01ca\u01cc\b\u0000\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0001\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0006\u0011\u0000\u0000\u01d0.\u0001\u0000\u0000\u0000\u01d1\u01d2"+ - "\u0005/\u0000\u0000\u01d2\u01d3\u0005/\u0000\u0000\u01d3\u01d7\u0001\u0000"+ - "\u0000\u0000\u01d4\u01d6\b\u0001\u0000\u0000\u01d5\u01d4\u0001\u0000\u0000"+ - "\u0000\u01d6\u01d9\u0001\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000"+ - "\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01db\u0001\u0000\u0000"+ - "\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01da\u01dc\u0005\r\u0000\u0000"+ - "\u01db\u01da\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000\u0000"+ - "\u01dc\u01de\u0001\u0000\u0000\u0000\u01dd\u01df\u0005\n\u0000\u0000\u01de"+ - "\u01dd\u0001\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01df"+ - "\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0006\u0012\b\u0000\u01e10"+ - 
"\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005/\u0000\u0000\u01e3\u01e4\u0005"+ - "*\u0000\u0000\u01e4\u01e9\u0001\u0000\u0000\u0000\u01e5\u01e8\u00031\u0013"+ - "\u0000\u01e6\u01e8\t\u0000\u0000\u0000\u01e7\u01e5\u0001\u0000\u0000\u0000"+ - "\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000\u0000\u0000"+ - "\u01ea\u01ec\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01ec\u01ed\u0005*\u0000\u0000\u01ed\u01ee\u0005/\u0000\u0000\u01ee\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\u0013\b\u0000\u01f02\u0001"+ - "\u0000\u0000\u0000\u01f1\u01f3\u0007\u0002\u0000\u0000\u01f2\u01f1\u0001"+ - "\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4\u01f2\u0001"+ - "\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f6\u01f7\u0006\u0014\b\u0000\u01f74\u0001\u0000"+ - "\u0000\u0000\u01f8\u01f9\u0003\u009fJ\u0000\u01f9\u01fa\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0006\u0015\t\u0000\u01fb\u01fc\u0006\u0015\n\u0000"+ - "\u01fc6\u0001\u0000\u0000\u0000\u01fd\u01fe\u0003?\u001a\u0000\u01fe\u01ff"+ - "\u0001\u0000\u0000\u0000\u01ff\u0200\u0006\u0016\u000b\u0000\u0200\u0201"+ - "\u0006\u0016\f\u0000\u02018\u0001\u0000\u0000\u0000\u0202\u0203\u0003"+ - "3\u0014\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205\u0006\u0017"+ - "\b\u0000\u0205:\u0001\u0000\u0000\u0000\u0206\u0207\u0003/\u0012\u0000"+ - "\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006\u0018\b\u0000\u0209"+ - "<\u0001\u0000\u0000\u0000\u020a\u020b\u00031\u0013\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\u0019\b\u0000\u020d>\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0005|\u0000\u0000\u020f\u0210\u0001\u0000\u0000"+ - "\u0000\u0210\u0211\u0006\u001a\f\u0000\u0211@\u0001\u0000\u0000\u0000"+ - "\u0212\u0213\u0007\u0003\u0000\u0000\u0213B\u0001\u0000\u0000\u0000\u0214"+ - 
"\u0215\u0007\u0004\u0000\u0000\u0215D\u0001\u0000\u0000\u0000\u0216\u0217"+ - "\u0005\\\u0000\u0000\u0217\u0218\u0007\u0005\u0000\u0000\u0218F\u0001"+ - "\u0000\u0000\u0000\u0219\u021a\b\u0006\u0000\u0000\u021aH\u0001\u0000"+ - "\u0000\u0000\u021b\u021d\u0007\u0007\u0000\u0000\u021c\u021e\u0007\b\u0000"+ - "\u0000\u021d\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000"+ - "\u0000\u021e\u0220\u0001\u0000\u0000\u0000\u021f\u0221\u0003A\u001b\u0000"+ - "\u0220\u021f\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000"+ - "\u0222\u0220\u0001\u0000\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000"+ - "\u0223J\u0001\u0000\u0000\u0000\u0224\u0225\u0005@\u0000\u0000\u0225L"+ - "\u0001\u0000\u0000\u0000\u0226\u0227\u0005`\u0000\u0000\u0227N\u0001\u0000"+ - "\u0000\u0000\u0228\u022c\b\t\u0000\u0000\u0229\u022a\u0005`\u0000\u0000"+ - "\u022a\u022c\u0005`\u0000\u0000\u022b\u0228\u0001\u0000\u0000\u0000\u022b"+ - "\u0229\u0001\u0000\u0000\u0000\u022cP\u0001\u0000\u0000\u0000\u022d\u022e"+ - "\u0005_\u0000\u0000\u022eR\u0001\u0000\u0000\u0000\u022f\u0233\u0003C"+ - "\u001c\u0000\u0230\u0233\u0003A\u001b\u0000\u0231\u0233\u0003Q#\u0000"+ - "\u0232\u022f\u0001\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000"+ - "\u0232\u0231\u0001\u0000\u0000\u0000\u0233T\u0001\u0000\u0000\u0000\u0234"+ - "\u0239\u0005\"\u0000\u0000\u0235\u0238\u0003E\u001d\u0000\u0236\u0238"+ - "\u0003G\u001e\u0000\u0237\u0235\u0001\u0000\u0000\u0000\u0237\u0236\u0001"+ - "\u0000\u0000\u0000\u0238\u023b\u0001\u0000\u0000\u0000\u0239\u0237\u0001"+ - "\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023c\u0001"+ - "\u0000\u0000\u0000\u023b\u0239\u0001\u0000\u0000\u0000\u023c\u0252\u0005"+ - "\"\u0000\u0000\u023d\u023e\u0005\"\u0000\u0000\u023e\u023f\u0005\"\u0000"+ - "\u0000\u023f\u0240\u0005\"\u0000\u0000\u0240\u0244\u0001\u0000\u0000\u0000"+ - "\u0241\u0243\b\u0001\u0000\u0000\u0242\u0241\u0001\u0000\u0000\u0000\u0243"+ - 
"\u0246\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0244"+ - "\u0242\u0001\u0000\u0000\u0000\u0245\u0247\u0001\u0000\u0000\u0000\u0246"+ - "\u0244\u0001\u0000\u0000\u0000\u0247\u0248\u0005\"\u0000\u0000\u0248\u0249"+ - "\u0005\"\u0000\u0000\u0249\u024a\u0005\"\u0000\u0000\u024a\u024c\u0001"+ - "\u0000\u0000\u0000\u024b\u024d\u0005\"\u0000\u0000\u024c\u024b\u0001\u0000"+ - "\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000\u024d\u024f\u0001\u0000"+ - "\u0000\u0000\u024e\u0250\u0005\"\u0000\u0000\u024f\u024e\u0001\u0000\u0000"+ - "\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000\u0000"+ - "\u0000\u0251\u0234\u0001\u0000\u0000\u0000\u0251\u023d\u0001\u0000\u0000"+ - "\u0000\u0252V\u0001\u0000\u0000\u0000\u0253\u0255\u0003A\u001b\u0000\u0254"+ - "\u0253\u0001\u0000\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256"+ - "\u0254\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0257"+ - "X\u0001\u0000\u0000\u0000\u0258\u025a\u0003A\u001b\u0000\u0259\u0258\u0001"+ - "\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025b\u0259\u0001"+ - "\u0000\u0000\u0000\u025b\u025c\u0001\u0000\u0000\u0000\u025c\u025d\u0001"+ - "\u0000\u0000\u0000\u025d\u0261\u0003g.\u0000\u025e\u0260\u0003A\u001b"+ - "\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000\u0000"+ - "\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261\u0262\u0001\u0000\u0000"+ - "\u0000\u0262\u0283\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000\u0000"+ - "\u0000\u0264\u0266\u0003g.\u0000\u0265\u0267\u0003A\u001b\u0000\u0266"+ - "\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268"+ - "\u0266\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269"+ - "\u0283\u0001\u0000\u0000\u0000\u026a\u026c\u0003A\u001b\u0000\u026b\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026b"+ - "\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u0276"+ - 
"\u0001\u0000\u0000\u0000\u026f\u0273\u0003g.\u0000\u0270\u0272\u0003A"+ - "\u001b\u0000\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001\u0000"+ - "\u0000\u0000\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000"+ - "\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000"+ - "\u0000\u0000\u0276\u026f\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000"+ - "\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000\u0278\u0279\u0003I\u001f"+ - "\u0000\u0279\u0283\u0001\u0000\u0000\u0000\u027a\u027c\u0003g.\u0000\u027b"+ - "\u027d\u0003A\u001b\u0000\u027c\u027b\u0001\u0000\u0000\u0000\u027d\u027e"+ - "\u0001\u0000\u0000\u0000\u027e\u027c\u0001\u0000\u0000\u0000\u027e\u027f"+ - "\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0281"+ - "\u0003I\u001f\u0000\u0281\u0283\u0001\u0000\u0000\u0000\u0282\u0259\u0001"+ - "\u0000\u0000\u0000\u0282\u0264\u0001\u0000\u0000\u0000\u0282\u026b\u0001"+ - "\u0000\u0000\u0000\u0282\u027a\u0001\u0000\u0000\u0000\u0283Z\u0001\u0000"+ - "\u0000\u0000\u0284\u0285\u0005b\u0000\u0000\u0285\u0286\u0005y\u0000\u0000"+ - "\u0286\\\u0001\u0000\u0000\u0000\u0287\u0288\u0005a\u0000\u0000\u0288"+ - "\u0289\u0005n\u0000\u0000\u0289\u028a\u0005d\u0000\u0000\u028a^\u0001"+ - "\u0000\u0000\u0000\u028b\u028c\u0005a\u0000\u0000\u028c\u028d\u0005s\u0000"+ - "\u0000\u028d\u028e\u0005c\u0000\u0000\u028e`\u0001\u0000\u0000\u0000\u028f"+ - "\u0290\u0005=\u0000\u0000\u0290b\u0001\u0000\u0000\u0000\u0291\u0292\u0005"+ - ",\u0000\u0000\u0292d\u0001\u0000\u0000\u0000\u0293\u0294\u0005d\u0000"+ - "\u0000\u0294\u0295\u0005e\u0000\u0000\u0295\u0296\u0005s\u0000\u0000\u0296"+ - "\u0297\u0005c\u0000\u0000\u0297f\u0001\u0000\u0000\u0000\u0298\u0299\u0005"+ - ".\u0000\u0000\u0299h\u0001\u0000\u0000\u0000\u029a\u029b\u0005f\u0000"+ - "\u0000\u029b\u029c\u0005a\u0000\u0000\u029c\u029d\u0005l\u0000\u0000\u029d"+ - "\u029e\u0005s\u0000\u0000\u029e\u029f\u0005e\u0000\u0000\u029fj\u0001"+ - 
"\u0000\u0000\u0000\u02a0\u02a1\u0005f\u0000\u0000\u02a1\u02a2\u0005i\u0000"+ - "\u0000\u02a2\u02a3\u0005r\u0000\u0000\u02a3\u02a4\u0005s\u0000\u0000\u02a4"+ - "\u02a5\u0005t\u0000\u0000\u02a5l\u0001\u0000\u0000\u0000\u02a6\u02a7\u0005"+ - "l\u0000\u0000\u02a7\u02a8\u0005a\u0000\u0000\u02a8\u02a9\u0005s\u0000"+ - "\u0000\u02a9\u02aa\u0005t\u0000\u0000\u02aan\u0001\u0000\u0000\u0000\u02ab"+ - "\u02ac\u0005(\u0000\u0000\u02acp\u0001\u0000\u0000\u0000\u02ad\u02ae\u0005"+ - "i\u0000\u0000\u02ae\u02af\u0005n\u0000\u0000\u02afr\u0001\u0000\u0000"+ - "\u0000\u02b0\u02b1\u0005i\u0000\u0000\u02b1\u02b2\u0005s\u0000\u0000\u02b2"+ - "t\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005l\u0000\u0000\u02b4\u02b5\u0005"+ - "i\u0000\u0000\u02b5\u02b6\u0005k\u0000\u0000\u02b6\u02b7\u0005e\u0000"+ - "\u0000\u02b7v\u0001\u0000\u0000\u0000\u02b8\u02b9\u0005n\u0000\u0000\u02b9"+ - "\u02ba\u0005o\u0000\u0000\u02ba\u02bb\u0005t\u0000\u0000\u02bbx\u0001"+ - "\u0000\u0000\u0000\u02bc\u02bd\u0005n\u0000\u0000\u02bd\u02be\u0005u\u0000"+ - "\u0000\u02be\u02bf\u0005l\u0000\u0000\u02bf\u02c0\u0005l\u0000\u0000\u02c0"+ - "z\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005n\u0000\u0000\u02c2\u02c3\u0005"+ - "u\u0000\u0000\u02c3\u02c4\u0005l\u0000\u0000\u02c4\u02c5\u0005l\u0000"+ - "\u0000\u02c5\u02c6\u0005s\u0000\u0000\u02c6|\u0001\u0000\u0000\u0000\u02c7"+ - "\u02c8\u0005o\u0000\u0000\u02c8\u02c9\u0005r\u0000\u0000\u02c9~\u0001"+ - "\u0000\u0000\u0000\u02ca\u02cb\u0005?\u0000\u0000\u02cb\u0080\u0001\u0000"+ - "\u0000\u0000\u02cc\u02cd\u0005r\u0000\u0000\u02cd\u02ce\u0005l\u0000\u0000"+ - "\u02ce\u02cf\u0005i\u0000\u0000\u02cf\u02d0\u0005k\u0000\u0000\u02d0\u02d1"+ - "\u0005e\u0000\u0000\u02d1\u0082\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005"+ - ")\u0000\u0000\u02d3\u0084\u0001\u0000\u0000\u0000\u02d4\u02d5\u0005t\u0000"+ - "\u0000\u02d5\u02d6\u0005r\u0000\u0000\u02d6\u02d7\u0005u\u0000\u0000\u02d7"+ - "\u02d8\u0005e\u0000\u0000\u02d8\u0086\u0001\u0000\u0000\u0000\u02d9\u02da"+ - 
"\u0005=\u0000\u0000\u02da\u02db\u0005=\u0000\u0000\u02db\u0088\u0001\u0000"+ - "\u0000\u0000\u02dc\u02dd\u0005=\u0000\u0000\u02dd\u02de\u0005~\u0000\u0000"+ - "\u02de\u008a\u0001\u0000\u0000\u0000\u02df\u02e0\u0005!\u0000\u0000\u02e0"+ - "\u02e1\u0005=\u0000\u0000\u02e1\u008c\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0005<\u0000\u0000\u02e3\u008e\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005"+ - "<\u0000\u0000\u02e5\u02e6\u0005=\u0000\u0000\u02e6\u0090\u0001\u0000\u0000"+ - "\u0000\u02e7\u02e8\u0005>\u0000\u0000\u02e8\u0092\u0001\u0000\u0000\u0000"+ - "\u02e9\u02ea\u0005>\u0000\u0000\u02ea\u02eb\u0005=\u0000\u0000\u02eb\u0094"+ - "\u0001\u0000\u0000\u0000\u02ec\u02ed\u0005+\u0000\u0000\u02ed\u0096\u0001"+ - "\u0000\u0000\u0000\u02ee\u02ef\u0005-\u0000\u0000\u02ef\u0098\u0001\u0000"+ - "\u0000\u0000\u02f0\u02f1\u0005*\u0000\u0000\u02f1\u009a\u0001\u0000\u0000"+ - "\u0000\u02f2\u02f3\u0005/\u0000\u0000\u02f3\u009c\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f5\u0005%\u0000\u0000\u02f5\u009e\u0001\u0000\u0000\u0000\u02f6"+ - "\u02f7\u0005[\u0000\u0000\u02f7\u02f8\u0001\u0000\u0000\u0000\u02f8\u02f9"+ - "\u0006J\u0000\u0000\u02f9\u02fa\u0006J\u0000\u0000\u02fa\u00a0\u0001\u0000"+ - "\u0000\u0000\u02fb\u02fc\u0005]\u0000\u0000\u02fc\u02fd\u0001\u0000\u0000"+ - "\u0000\u02fd\u02fe\u0006K\f\u0000\u02fe\u02ff\u0006K\f\u0000\u02ff\u00a2"+ - "\u0001\u0000\u0000\u0000\u0300\u0304\u0003C\u001c\u0000\u0301\u0303\u0003"+ - "S$\u0000\u0302\u0301\u0001\u0000\u0000\u0000\u0303\u0306\u0001\u0000\u0000"+ - "\u0000\u0304\u0302\u0001\u0000\u0000\u0000\u0304\u0305\u0001\u0000\u0000"+ - "\u0000\u0305\u0311\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000"+ - "\u0000\u0307\u030a\u0003Q#\u0000\u0308\u030a\u0003K \u0000\u0309\u0307"+ - "\u0001\u0000\u0000\u0000\u0309\u0308\u0001\u0000\u0000\u0000\u030a\u030c"+ - "\u0001\u0000\u0000\u0000\u030b\u030d\u0003S$\u0000\u030c\u030b\u0001\u0000"+ - "\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u030c\u0001\u0000"+ - 
"\u0000\u0000\u030e\u030f\u0001\u0000\u0000\u0000\u030f\u0311\u0001\u0000"+ - "\u0000\u0000\u0310\u0300\u0001\u0000\u0000\u0000\u0310\u0309\u0001\u0000"+ - "\u0000\u0000\u0311\u00a4\u0001\u0000\u0000\u0000\u0312\u0314\u0003M!\u0000"+ - "\u0313\u0315\u0003O\"\u0000\u0314\u0313\u0001\u0000\u0000\u0000\u0315"+ - "\u0316\u0001\u0000\u0000\u0000\u0316\u0314\u0001\u0000\u0000\u0000\u0316"+ - "\u0317\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000\u0000\u0318"+ - "\u0319\u0003M!\u0000\u0319\u00a6\u0001\u0000\u0000\u0000\u031a\u031b\u0003"+ - "\u00a5M\u0000\u031b\u00a8\u0001\u0000\u0000\u0000\u031c\u031d\u0003/\u0012"+ - "\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e\u031f\u0006O\b\u0000"+ - "\u031f\u00aa\u0001\u0000\u0000\u0000\u0320\u0321\u00031\u0013\u0000\u0321"+ - "\u0322\u0001\u0000\u0000\u0000\u0322\u0323\u0006P\b\u0000\u0323\u00ac"+ - "\u0001\u0000\u0000\u0000\u0324\u0325\u00033\u0014\u0000\u0325\u0326\u0001"+ - "\u0000\u0000\u0000\u0326\u0327\u0006Q\b\u0000\u0327\u00ae\u0001\u0000"+ - "\u0000\u0000\u0328\u0329\u0003?\u001a\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032b\u0006R\u000b\u0000\u032b\u032c\u0006R\f\u0000\u032c"+ - "\u00b0\u0001\u0000\u0000\u0000\u032d\u032e\u0003\u009fJ\u0000\u032e\u032f"+ - "\u0001\u0000\u0000\u0000\u032f\u0330\u0006S\t\u0000\u0330\u00b2\u0001"+ - "\u0000\u0000\u0000\u0331\u0332\u0003\u00a1K\u0000\u0332\u0333\u0001\u0000"+ - "\u0000\u0000\u0333\u0334\u0006T\r\u0000\u0334\u00b4\u0001\u0000\u0000"+ - "\u0000\u0335\u0336\u0003c,\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ - "\u0338\u0006U\u000e\u0000\u0338\u00b6\u0001\u0000\u0000\u0000\u0339\u033a"+ - "\u0003a+\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006V"+ - "\u000f\u0000\u033c\u00b8\u0001\u0000\u0000\u0000\u033d\u033e\u0005m\u0000"+ - "\u0000\u033e\u033f\u0005e\u0000\u0000\u033f\u0340\u0005t\u0000\u0000\u0340"+ - "\u0341\u0005a\u0000\u0000\u0341\u0342\u0005d\u0000\u0000\u0342\u0343\u0005"+ - 
"a\u0000\u0000\u0343\u0344\u0005t\u0000\u0000\u0344\u0345\u0005a\u0000"+ - "\u0000\u0345\u00ba\u0001\u0000\u0000\u0000\u0346\u034a\b\n\u0000\u0000"+ - "\u0347\u0348\u0005/\u0000\u0000\u0348\u034a\b\u000b\u0000\u0000\u0349"+ - "\u0346\u0001\u0000\u0000\u0000\u0349\u0347\u0001\u0000\u0000\u0000\u034a"+ - "\u00bc\u0001\u0000\u0000\u0000\u034b\u034d\u0003\u00bbX\u0000\u034c\u034b"+ - "\u0001\u0000\u0000\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034c"+ - "\u0001\u0000\u0000\u0000\u034e\u034f\u0001\u0000\u0000\u0000\u034f\u00be"+ - "\u0001\u0000\u0000\u0000\u0350\u0351\u0003\u00a7N\u0000\u0351\u0352\u0001"+ - "\u0000\u0000\u0000\u0352\u0353\u0006Z\u0010\u0000\u0353\u00c0\u0001\u0000"+ - "\u0000\u0000\u0354\u0355\u0003/\u0012\u0000\u0355\u0356\u0001\u0000\u0000"+ - "\u0000\u0356\u0357\u0006[\b\u0000\u0357\u00c2\u0001\u0000\u0000\u0000"+ - "\u0358\u0359\u00031\u0013\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006\\\b\u0000\u035b\u00c4\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u00033\u0014\u0000\u035d\u035e\u0001\u0000\u0000\u0000\u035e\u035f\u0006"+ - "]\b\u0000\u035f\u00c6\u0001\u0000\u0000\u0000\u0360\u0361\u0003?\u001a"+ - "\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006^\u000b\u0000"+ - "\u0363\u0364\u0006^\f\u0000\u0364\u00c8\u0001\u0000\u0000\u0000\u0365"+ - "\u0366\u0003g.\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0368\u0006"+ - "_\u0011\u0000\u0368\u00ca\u0001\u0000\u0000\u0000\u0369\u036a\u0003c,"+ - "\u0000\u036a\u036b\u0001\u0000\u0000\u0000\u036b\u036c\u0006`\u000e\u0000"+ - "\u036c\u00cc\u0001\u0000\u0000\u0000\u036d\u0372\u0003C\u001c\u0000\u036e"+ - "\u0372\u0003A\u001b\u0000\u036f\u0372\u0003Q#\u0000\u0370\u0372\u0003"+ - "\u0099G\u0000\u0371\u036d\u0001\u0000\u0000\u0000\u0371\u036e\u0001\u0000"+ - "\u0000\u0000\u0371\u036f\u0001\u0000\u0000\u0000\u0371\u0370\u0001\u0000"+ - "\u0000\u0000\u0372\u00ce\u0001\u0000\u0000\u0000\u0373\u0376\u0003C\u001c"+ - 
"\u0000\u0374\u0376\u0003\u0099G\u0000\u0375\u0373\u0001\u0000\u0000\u0000"+ - "\u0375\u0374\u0001\u0000\u0000\u0000\u0376\u037a\u0001\u0000\u0000\u0000"+ - "\u0377\u0379\u0003\u00cda\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379"+ - "\u037c\u0001\u0000\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a"+ - "\u037b\u0001\u0000\u0000\u0000\u037b\u0387\u0001\u0000\u0000\u0000\u037c"+ - "\u037a\u0001\u0000\u0000\u0000\u037d\u0380\u0003Q#\u0000\u037e\u0380\u0003"+ - "K \u0000\u037f\u037d\u0001\u0000\u0000\u0000\u037f\u037e\u0001\u0000\u0000"+ - "\u0000\u0380\u0382\u0001\u0000\u0000\u0000\u0381\u0383\u0003\u00cda\u0000"+ - "\u0382\u0381\u0001\u0000\u0000\u0000\u0383\u0384\u0001\u0000\u0000\u0000"+ - "\u0384\u0382\u0001\u0000\u0000\u0000\u0384\u0385\u0001\u0000\u0000\u0000"+ - "\u0385\u0387\u0001\u0000\u0000\u0000\u0386\u0375\u0001\u0000\u0000\u0000"+ - "\u0386\u037f\u0001\u0000\u0000\u0000\u0387\u00d0\u0001\u0000\u0000\u0000"+ - "\u0388\u038b\u0003\u00cfb\u0000\u0389\u038b\u0003\u00a5M\u0000\u038a\u0388"+ - "\u0001\u0000\u0000\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038c"+ + "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0004\u0099\u048f\b\u0099\u000b\u0099\f\u0099\u0490\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ + 
"\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0002"+ + "\u01fb\u0256\u0000\u009d\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014"+ + "\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&"+ + "\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u00156\u00168\u0000"+ + ":\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000"+ + "N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001f"+ + "b d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a"+ + "4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e"+ + ">\u00a0?\u00a2@\u00a4A\u00a6B\u00a8\u0000\u00aaC\u00acD\u00aeE\u00b0F"+ + "\u00b2\u0000\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000\u00bcG\u00be"+ + "\u0000\u00c0H\u00c2\u0000\u00c4I\u00c6J\u00c8K\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4L\u00d6M\u00d8N\u00daO\u00dc"+ + "\u0000\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4P\u00e6\u0000\u00e8Q\u00ea"+ + "R\u00ecS\u00ee\u0000\u00f0\u0000\u00f2T\u00f4U\u00f6\u0000\u00f8V\u00fa"+ + "\u0000\u00fc\u0000\u00feW\u0100X\u0102Y\u0104\u0000\u0106\u0000\u0108"+ + "\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112Z\u0114[\u0116"+ + "\\\u0118\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120]\u0122^\u0124"+ + "_\u0126\u0000\u0128`\u012aa\u012cb\u012ec\u0130\u0000\u0132d\u0134e\u0136"+ + "f\u0138g\u013a\u0000\u013ch\u013ei\u0140j\u0142k\u0144l\f\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t\n\r\r"+ + " //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]`"+ + "`||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04b9\u0000"+ + "\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010"+ + "\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014"+ + 
"\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018"+ + "\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c"+ + "\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001"+ + "\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000"+ + "\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000"+ + "\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000."+ + "\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000"+ + "\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000"+ + "\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001<"+ + "\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001\u0000"+ + "\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000"+ + "\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002"+ + "^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001"+ + "\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000"+ + "\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002"+ + "l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001"+ + "\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000"+ + "\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002"+ + "z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001"+ + "\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082\u0001"+ + "\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086\u0001"+ + "\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a\u0001"+ + "\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e\u0001"+ + "\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092\u0001"+ + "\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096\u0001"+ + 
"\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a\u0001"+ + "\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e\u0001"+ + "\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2\u0001"+ + "\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ + "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8\u0001"+ + "\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0004\u00ca\u0001"+ + "\u0000\u0000\u0000\u0004\u00cc\u0001\u0000\u0000\u0000\u0004\u00ce\u0001"+ + "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ + "\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001"+ + "\u0000\u0000\u0000\u0005\u00dc\u0001\u0000\u0000\u0000\u0005\u00de\u0001"+ + "\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2\u0001"+ + "\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ + "\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0006\u00ee\u0001"+ + "\u0000\u0000\u0000\u0006\u00f0\u0001\u0000\u0000\u0000\u0006\u00f2\u0001"+ + "\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ + "\u0000\u0000\u0000\u0006\u00fa\u0001\u0000\u0000\u0000\u0006\u00fc\u0001"+ + "\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100\u0001"+ + "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ + 
"\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ + "\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ + "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000"+ + "\u0000\u0000\b\u011a\u0001\u0000\u0000\u0000\b\u011c\u0001\u0000\u0000"+ + "\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001\u0000\u0000\u0000"+ + "\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000\u0000\u0000\t\u0126"+ + "\u0001\u0000\u0000\u0000\t\u0128\u0001\u0000\u0000\u0000\t\u012a\u0001"+ + "\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000\t\u012e\u0001\u0000"+ + "\u0000\u0000\n\u0130\u0001\u0000\u0000\u0000\n\u0132\u0001\u0000\u0000"+ + "\u0000\n\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001\u0000\u0000\u0000"+ + "\n\u0138\u0001\u0000\u0000\u0000\u000b\u013a\u0001\u0000\u0000\u0000\u000b"+ + "\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001\u0000\u0000\u0000\u000b"+ + "\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000\u0000\u000b"+ + "\u0144\u0001\u0000\u0000\u0000\f\u0146\u0001\u0000\u0000\u0000\u000e\u0150"+ + "\u0001\u0000\u0000\u0000\u0010\u0157\u0001\u0000\u0000\u0000\u0012\u0160"+ + "\u0001\u0000\u0000\u0000\u0014\u0167\u0001\u0000\u0000\u0000\u0016\u0171"+ + "\u0001\u0000\u0000\u0000\u0018\u0178\u0001\u0000\u0000\u0000\u001a\u017f"+ + "\u0001\u0000\u0000\u0000\u001c\u018d\u0001\u0000\u0000\u0000\u001e\u0194"+ + "\u0001\u0000\u0000\u0000 \u019c\u0001\u0000\u0000\u0000\"\u01a3\u0001"+ + "\u0000\u0000\u0000$\u01af\u0001\u0000\u0000\u0000&\u01b8\u0001\u0000\u0000"+ + "\u0000(\u01be\u0001\u0000\u0000\u0000*\u01c5\u0001\u0000\u0000\u0000,"+ + "\u01cc\u0001\u0000\u0000\u0000.\u01d4\u0001\u0000\u0000\u00000\u01dd\u0001"+ + "\u0000\u0000\u00002\u01e3\u0001\u0000\u0000\u00004\u01f4\u0001\u0000\u0000"+ + 
"\u00006\u0204\u0001\u0000\u0000\u00008\u020a\u0001\u0000\u0000\u0000:"+ + "\u020f\u0001\u0000\u0000\u0000<\u0214\u0001\u0000\u0000\u0000>\u0218\u0001"+ + "\u0000\u0000\u0000@\u021c\u0001\u0000\u0000\u0000B\u0220\u0001\u0000\u0000"+ + "\u0000D\u0224\u0001\u0000\u0000\u0000F\u0226\u0001\u0000\u0000\u0000H"+ + "\u0228\u0001\u0000\u0000\u0000J\u022b\u0001\u0000\u0000\u0000L\u022d\u0001"+ + "\u0000\u0000\u0000N\u0236\u0001\u0000\u0000\u0000P\u0238\u0001\u0000\u0000"+ + "\u0000R\u023d\u0001\u0000\u0000\u0000T\u023f\u0001\u0000\u0000\u0000V"+ + "\u0244\u0001\u0000\u0000\u0000X\u0263\u0001\u0000\u0000\u0000Z\u0266\u0001"+ + "\u0000\u0000\u0000\\\u0294\u0001\u0000\u0000\u0000^\u0296\u0001\u0000"+ + "\u0000\u0000`\u0299\u0001\u0000\u0000\u0000b\u029d\u0001\u0000\u0000\u0000"+ + "d\u02a1\u0001\u0000\u0000\u0000f\u02a3\u0001\u0000\u0000\u0000h\u02a5"+ + "\u0001\u0000\u0000\u0000j\u02aa\u0001\u0000\u0000\u0000l\u02ac\u0001\u0000"+ + "\u0000\u0000n\u02b2\u0001\u0000\u0000\u0000p\u02b8\u0001\u0000\u0000\u0000"+ + "r\u02bd\u0001\u0000\u0000\u0000t\u02bf\u0001\u0000\u0000\u0000v\u02c2"+ + "\u0001\u0000\u0000\u0000x\u02c5\u0001\u0000\u0000\u0000z\u02ca\u0001\u0000"+ + "\u0000\u0000|\u02ce\u0001\u0000\u0000\u0000~\u02d3\u0001\u0000\u0000\u0000"+ + "\u0080\u02d9\u0001\u0000\u0000\u0000\u0082\u02dc\u0001\u0000\u0000\u0000"+ + "\u0084\u02de\u0001\u0000\u0000\u0000\u0086\u02e4\u0001\u0000\u0000\u0000"+ + "\u0088\u02e6\u0001\u0000\u0000\u0000\u008a\u02eb\u0001\u0000\u0000\u0000"+ + "\u008c\u02ee\u0001\u0000\u0000\u0000\u008e\u02f1\u0001\u0000\u0000\u0000"+ + "\u0090\u02f4\u0001\u0000\u0000\u0000\u0092\u02f6\u0001\u0000\u0000\u0000"+ + "\u0094\u02f9\u0001\u0000\u0000\u0000\u0096\u02fb\u0001\u0000\u0000\u0000"+ + "\u0098\u02fe\u0001\u0000\u0000\u0000\u009a\u0300\u0001\u0000\u0000\u0000"+ + "\u009c\u0302\u0001\u0000\u0000\u0000\u009e\u0304\u0001\u0000\u0000\u0000"+ + "\u00a0\u0306\u0001\u0000\u0000\u0000\u00a2\u0308\u0001\u0000\u0000\u0000"+ + 
"\u00a4\u030d\u0001\u0000\u0000\u0000\u00a6\u0322\u0001\u0000\u0000\u0000"+ + "\u00a8\u0324\u0001\u0000\u0000\u0000\u00aa\u032c\u0001\u0000\u0000\u0000"+ + "\u00ac\u032e\u0001\u0000\u0000\u0000\u00ae\u0332\u0001\u0000\u0000\u0000"+ + "\u00b0\u0336\u0001\u0000\u0000\u0000\u00b2\u033a\u0001\u0000\u0000\u0000"+ + "\u00b4\u033f\u0001\u0000\u0000\u0000\u00b6\u0343\u0001\u0000\u0000\u0000"+ + "\u00b8\u0347\u0001\u0000\u0000\u0000\u00ba\u034b\u0001\u0000\u0000\u0000"+ + "\u00bc\u034f\u0001\u0000\u0000\u0000\u00be\u035b\u0001\u0000\u0000\u0000"+ + "\u00c0\u035e\u0001\u0000\u0000\u0000\u00c2\u0362\u0001\u0000\u0000\u0000"+ + "\u00c4\u0366\u0001\u0000\u0000\u0000\u00c6\u036a\u0001\u0000\u0000\u0000"+ + "\u00c8\u036e\u0001\u0000\u0000\u0000\u00ca\u0372\u0001\u0000\u0000\u0000"+ + "\u00cc\u0377\u0001\u0000\u0000\u0000\u00ce\u037b\u0001\u0000\u0000\u0000"+ + "\u00d0\u0383\u0001\u0000\u0000\u0000\u00d2\u0398\u0001\u0000\u0000\u0000"+ + "\u00d4\u039c\u0001\u0000\u0000\u0000\u00d6\u03a0\u0001\u0000\u0000\u0000"+ + "\u00d8\u03a4\u0001\u0000\u0000\u0000\u00da\u03a8\u0001\u0000\u0000\u0000"+ + "\u00dc\u03ac\u0001\u0000\u0000\u0000\u00de\u03b1\u0001\u0000\u0000\u0000"+ + "\u00e0\u03b5\u0001\u0000\u0000\u0000\u00e2\u03b9\u0001\u0000\u0000\u0000"+ + "\u00e4\u03bd\u0001\u0000\u0000\u0000\u00e6\u03c0\u0001\u0000\u0000\u0000"+ + "\u00e8\u03c4\u0001\u0000\u0000\u0000\u00ea\u03c8\u0001\u0000\u0000\u0000"+ + "\u00ec\u03cc\u0001\u0000\u0000\u0000\u00ee\u03d0\u0001\u0000\u0000\u0000"+ + "\u00f0\u03d5\u0001\u0000\u0000\u0000\u00f2\u03da\u0001\u0000\u0000\u0000"+ + "\u00f4\u03df\u0001\u0000\u0000\u0000\u00f6\u03e6\u0001\u0000\u0000\u0000"+ + "\u00f8\u03ef\u0001\u0000\u0000\u0000\u00fa\u03f6\u0001\u0000\u0000\u0000"+ + "\u00fc\u03fa\u0001\u0000\u0000\u0000\u00fe\u03fe\u0001\u0000\u0000\u0000"+ + "\u0100\u0402\u0001\u0000\u0000\u0000\u0102\u0406\u0001\u0000\u0000\u0000"+ + "\u0104\u040a\u0001\u0000\u0000\u0000\u0106\u0410\u0001\u0000\u0000\u0000"+ + 
"\u0108\u0414\u0001\u0000\u0000\u0000\u010a\u0418\u0001\u0000\u0000\u0000"+ + "\u010c\u041c\u0001\u0000\u0000\u0000\u010e\u0420\u0001\u0000\u0000\u0000"+ + "\u0110\u0424\u0001\u0000\u0000\u0000\u0112\u0428\u0001\u0000\u0000\u0000"+ + "\u0114\u042c\u0001\u0000\u0000\u0000\u0116\u0430\u0001\u0000\u0000\u0000"+ + "\u0118\u0434\u0001\u0000\u0000\u0000\u011a\u0439\u0001\u0000\u0000\u0000"+ + "\u011c\u043d\u0001\u0000\u0000\u0000\u011e\u0441\u0001\u0000\u0000\u0000"+ + "\u0120\u0445\u0001\u0000\u0000\u0000\u0122\u0449\u0001\u0000\u0000\u0000"+ + "\u0124\u044d\u0001\u0000\u0000\u0000\u0126\u0451\u0001\u0000\u0000\u0000"+ + "\u0128\u0456\u0001\u0000\u0000\u0000\u012a\u045b\u0001\u0000\u0000\u0000"+ + "\u012c\u045f\u0001\u0000\u0000\u0000\u012e\u0463\u0001\u0000\u0000\u0000"+ + "\u0130\u0467\u0001\u0000\u0000\u0000\u0132\u046c\u0001\u0000\u0000\u0000"+ + "\u0134\u0476\u0001\u0000\u0000\u0000\u0136\u047a\u0001\u0000\u0000\u0000"+ + "\u0138\u047e\u0001\u0000\u0000\u0000\u013a\u0482\u0001\u0000\u0000\u0000"+ + "\u013c\u0487\u0001\u0000\u0000\u0000\u013e\u048e\u0001\u0000\u0000\u0000"+ + "\u0140\u0492\u0001\u0000\u0000\u0000\u0142\u0496\u0001\u0000\u0000\u0000"+ + "\u0144\u049a\u0001\u0000\u0000\u0000\u0146\u0147\u0005d\u0000\u0000\u0147"+ + "\u0148\u0005i\u0000\u0000\u0148\u0149\u0005s\u0000\u0000\u0149\u014a\u0005"+ + "s\u0000\u0000\u014a\u014b\u0005e\u0000\u0000\u014b\u014c\u0005c\u0000"+ + "\u0000\u014c\u014d\u0005t\u0000\u0000\u014d\u014e\u0001\u0000\u0000\u0000"+ + "\u014e\u014f\u0006\u0000\u0000\u0000\u014f\r\u0001\u0000\u0000\u0000\u0150"+ + "\u0151\u0005d\u0000\u0000\u0151\u0152\u0005r\u0000\u0000\u0152\u0153\u0005"+ + "o\u0000\u0000\u0153\u0154\u0005p\u0000\u0000\u0154\u0155\u0001\u0000\u0000"+ + "\u0000\u0155\u0156\u0006\u0001\u0001\u0000\u0156\u000f\u0001\u0000\u0000"+ + "\u0000\u0157\u0158\u0005e\u0000\u0000\u0158\u0159\u0005n\u0000\u0000\u0159"+ + "\u015a\u0005r\u0000\u0000\u015a\u015b\u0005i\u0000\u0000\u015b\u015c\u0005"+ + 
"c\u0000\u0000\u015c\u015d\u0005h\u0000\u0000\u015d\u015e\u0001\u0000\u0000"+ + "\u0000\u015e\u015f\u0006\u0002\u0002\u0000\u015f\u0011\u0001\u0000\u0000"+ + "\u0000\u0160\u0161\u0005e\u0000\u0000\u0161\u0162\u0005v\u0000\u0000\u0162"+ + "\u0163\u0005a\u0000\u0000\u0163\u0164\u0005l\u0000\u0000\u0164\u0165\u0001"+ + "\u0000\u0000\u0000\u0165\u0166\u0006\u0003\u0000\u0000\u0166\u0013\u0001"+ + "\u0000\u0000\u0000\u0167\u0168\u0005e\u0000\u0000\u0168\u0169\u0005x\u0000"+ + "\u0000\u0169\u016a\u0005p\u0000\u0000\u016a\u016b\u0005l\u0000\u0000\u016b"+ + "\u016c\u0005a\u0000\u0000\u016c\u016d\u0005i\u0000\u0000\u016d\u016e\u0005"+ + "n\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0170\u0006\u0004"+ + "\u0003\u0000\u0170\u0015\u0001\u0000\u0000\u0000\u0171\u0172\u0005f\u0000"+ + "\u0000\u0172\u0173\u0005r\u0000\u0000\u0173\u0174\u0005o\u0000\u0000\u0174"+ + "\u0175\u0005m\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u0176\u0177"+ + "\u0006\u0005\u0004\u0000\u0177\u0017\u0001\u0000\u0000\u0000\u0178\u0179"+ + "\u0005g\u0000\u0000\u0179\u017a\u0005r\u0000\u0000\u017a\u017b\u0005o"+ + "\u0000\u0000\u017b\u017c\u0005k\u0000\u0000\u017c\u017d\u0001\u0000\u0000"+ + "\u0000\u017d\u017e\u0006\u0006\u0000\u0000\u017e\u0019\u0001\u0000\u0000"+ + "\u0000\u017f\u0180\u0005i\u0000\u0000\u0180\u0181\u0005n\u0000\u0000\u0181"+ + "\u0182\u0005l\u0000\u0000\u0182\u0183\u0005i\u0000\u0000\u0183\u0184\u0005"+ + "n\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005s\u0000"+ + "\u0000\u0186\u0187\u0005t\u0000\u0000\u0187\u0188\u0005a\u0000\u0000\u0188"+ + "\u0189\u0005t\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0001"+ + "\u0000\u0000\u0000\u018b\u018c\u0006\u0007\u0000\u0000\u018c\u001b\u0001"+ + "\u0000\u0000\u0000\u018d\u018e\u0005k\u0000\u0000\u018e\u018f\u0005e\u0000"+ + "\u0000\u018f\u0190\u0005e\u0000\u0000\u0190\u0191\u0005p\u0000\u0000\u0191"+ + "\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\b\u0001\u0000\u0193\u001d"+ + 
"\u0001\u0000\u0000\u0000\u0194\u0195\u0005l\u0000\u0000\u0195\u0196\u0005"+ + "i\u0000\u0000\u0196\u0197\u0005m\u0000\u0000\u0197\u0198\u0005i\u0000"+ + "\u0000\u0198\u0199\u0005t\u0000\u0000\u0199\u019a\u0001\u0000\u0000\u0000"+ + "\u019a\u019b\u0006\t\u0000\u0000\u019b\u001f\u0001\u0000\u0000\u0000\u019c"+ + "\u019d\u0005m\u0000\u0000\u019d\u019e\u0005e\u0000\u0000\u019e\u019f\u0005"+ + "t\u0000\u0000\u019f\u01a0\u0005a\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a1\u01a2\u0006\n\u0005\u0000\u01a2!\u0001\u0000\u0000\u0000"+ + "\u01a3\u01a4\u0005m\u0000\u0000\u01a4\u01a5\u0005v\u0000\u0000\u01a5\u01a6"+ + "\u0005_\u0000\u0000\u01a6\u01a7\u0005e\u0000\u0000\u01a7\u01a8\u0005x"+ + "\u0000\u0000\u01a8\u01a9\u0005p\u0000\u0000\u01a9\u01aa\u0005a\u0000\u0000"+ + "\u01aa\u01ab\u0005n\u0000\u0000\u01ab\u01ac\u0005d\u0000\u0000\u01ac\u01ad"+ + "\u0001\u0000\u0000\u0000\u01ad\u01ae\u0006\u000b\u0006\u0000\u01ae#\u0001"+ + "\u0000\u0000\u0000\u01af\u01b0\u0005r\u0000\u0000\u01b0\u01b1\u0005e\u0000"+ + "\u0000\u01b1\u01b2\u0005n\u0000\u0000\u01b2\u01b3\u0005a\u0000\u0000\u01b3"+ + "\u01b4\u0005m\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6\u0001"+ + "\u0000\u0000\u0000\u01b6\u01b7\u0006\f\u0007\u0000\u01b7%\u0001\u0000"+ + "\u0000\u0000\u01b8\u01b9\u0005r\u0000\u0000\u01b9\u01ba\u0005o\u0000\u0000"+ + "\u01ba\u01bb\u0005w\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc"+ + "\u01bd\u0006\r\u0000\u0000\u01bd\'\u0001\u0000\u0000\u0000\u01be\u01bf"+ + "\u0005s\u0000\u0000\u01bf\u01c0\u0005h\u0000\u0000\u01c0\u01c1\u0005o"+ + "\u0000\u0000\u01c1\u01c2\u0005w\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000"+ + "\u0000\u01c3\u01c4\u0006\u000e\b\u0000\u01c4)\u0001\u0000\u0000\u0000"+ + "\u01c5\u01c6\u0005s\u0000\u0000\u01c6\u01c7\u0005o\u0000\u0000\u01c7\u01c8"+ + "\u0005r\u0000\u0000\u01c8\u01c9\u0005t\u0000\u0000\u01c9\u01ca\u0001\u0000"+ + "\u0000\u0000\u01ca\u01cb\u0006\u000f\u0000\u0000\u01cb+\u0001\u0000\u0000"+ + 
"\u0000\u01cc\u01cd\u0005s\u0000\u0000\u01cd\u01ce\u0005t\u0000\u0000\u01ce"+ + "\u01cf\u0005a\u0000\u0000\u01cf\u01d0\u0005t\u0000\u0000\u01d0\u01d1\u0005"+ + "s\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0010"+ + "\u0000\u0000\u01d3-\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005w\u0000\u0000"+ + "\u01d5\u01d6\u0005h\u0000\u0000\u01d6\u01d7\u0005e\u0000\u0000\u01d7\u01d8"+ + "\u0005r\u0000\u0000\u01d8\u01d9\u0005e\u0000\u0000\u01d9\u01da\u0001\u0000"+ + "\u0000\u0000\u01da\u01db\u0006\u0011\u0000\u0000\u01db/\u0001\u0000\u0000"+ + "\u0000\u01dc\u01de\b\u0000\u0000\u0000\u01dd\u01dc\u0001\u0000\u0000\u0000"+ + "\u01de\u01df\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000"+ + "\u01df\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000"+ + "\u01e1\u01e2\u0006\u0012\u0000\u0000\u01e21\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0005/\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000\u01e5\u01e9\u0001"+ + "\u0000\u0000\u0000\u01e6\u01e8\b\u0001\u0000\u0000\u01e7\u01e6\u0001\u0000"+ + "\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ed\u0001\u0000"+ + "\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ee\u0005\r\u0000"+ + "\u0000\u01ed\u01ec\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000"+ + "\u0000\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01f1\u0005\n\u0000\u0000"+ + "\u01f0\u01ef\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3\u0006\u0013\t\u0000\u01f3"+ + "3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005/\u0000\u0000\u01f5\u01f6\u0005"+ + "*\u0000\u0000\u01f6\u01fb\u0001\u0000\u0000\u0000\u01f7\u01fa\u00034\u0014"+ + "\u0000\u01f8\u01fa\t\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000"+ + "\u01f9\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000"+ + "\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000"+ + 
"\u01fc\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fe\u01ff\u0005*\u0000\u0000\u01ff\u0200\u0005/\u0000\u0000\u0200\u0201"+ + "\u0001\u0000\u0000\u0000\u0201\u0202\u0006\u0014\t\u0000\u02025\u0001"+ + "\u0000\u0000\u0000\u0203\u0205\u0007\u0002\u0000\u0000\u0204\u0203\u0001"+ + "\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0204\u0001"+ + "\u0000\u0000\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0006\u0015\t\u0000\u02097\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0003\u00a2K\u0000\u020b\u020c\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u0006\u0016\n\u0000\u020d\u020e\u0006\u0016\u000b\u0000"+ + "\u020e9\u0001\u0000\u0000\u0000\u020f\u0210\u0003B\u001b\u0000\u0210\u0211"+ + "\u0001\u0000\u0000\u0000\u0211\u0212\u0006\u0017\f\u0000\u0212\u0213\u0006"+ + "\u0017\r\u0000\u0213;\u0001\u0000\u0000\u0000\u0214\u0215\u00036\u0015"+ + "\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216\u0217\u0006\u0018\t\u0000"+ + "\u0217=\u0001\u0000\u0000\u0000\u0218\u0219\u00032\u0013\u0000\u0219\u021a"+ + "\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0019\t\u0000\u021b?\u0001"+ + "\u0000\u0000\u0000\u021c\u021d\u00034\u0014\u0000\u021d\u021e\u0001\u0000"+ + "\u0000\u0000\u021e\u021f\u0006\u001a\t\u0000\u021fA\u0001\u0000\u0000"+ + "\u0000\u0220\u0221\u0005|\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000"+ + "\u0222\u0223\u0006\u001b\r\u0000\u0223C\u0001\u0000\u0000\u0000\u0224"+ + "\u0225\u0007\u0003\u0000\u0000\u0225E\u0001\u0000\u0000\u0000\u0226\u0227"+ + "\u0007\u0004\u0000\u0000\u0227G\u0001\u0000\u0000\u0000\u0228\u0229\u0005"+ + "\\\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000\u022aI\u0001\u0000"+ + "\u0000\u0000\u022b\u022c\b\u0006\u0000\u0000\u022cK\u0001\u0000\u0000"+ + "\u0000\u022d\u022f\u0007\u0007\u0000\u0000\u022e\u0230\u0007\b\u0000\u0000"+ + "\u022f\u022e\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000\u0000\u0000"+ + 
"\u0230\u0232\u0001\u0000\u0000\u0000\u0231\u0233\u0003D\u001c\u0000\u0232"+ + "\u0231\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000\u0234"+ + "\u0232\u0001\u0000\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235"+ + "M\u0001\u0000\u0000\u0000\u0236\u0237\u0005@\u0000\u0000\u0237O\u0001"+ + "\u0000\u0000\u0000\u0238\u0239\u0005`\u0000\u0000\u0239Q\u0001\u0000\u0000"+ + "\u0000\u023a\u023e\b\t\u0000\u0000\u023b\u023c\u0005`\u0000\u0000\u023c"+ + "\u023e\u0005`\u0000\u0000\u023d\u023a\u0001\u0000\u0000\u0000\u023d\u023b"+ + "\u0001\u0000\u0000\u0000\u023eS\u0001\u0000\u0000\u0000\u023f\u0240\u0005"+ + "_\u0000\u0000\u0240U\u0001\u0000\u0000\u0000\u0241\u0245\u0003F\u001d"+ + "\u0000\u0242\u0245\u0003D\u001c\u0000\u0243\u0245\u0003T$\u0000\u0244"+ + "\u0241\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244"+ + "\u0243\u0001\u0000\u0000\u0000\u0245W\u0001\u0000\u0000\u0000\u0246\u024b"+ + "\u0005\"\u0000\u0000\u0247\u024a\u0003H\u001e\u0000\u0248\u024a\u0003"+ + "J\u001f\u0000\u0249\u0247\u0001\u0000\u0000\u0000\u0249\u0248\u0001\u0000"+ + "\u0000\u0000\u024a\u024d\u0001\u0000\u0000\u0000\u024b\u0249\u0001\u0000"+ + "\u0000\u0000\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024e\u0001\u0000"+ + "\u0000\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024e\u0264\u0005\"\u0000"+ + "\u0000\u024f\u0250\u0005\"\u0000\u0000\u0250\u0251\u0005\"\u0000\u0000"+ + "\u0251\u0252\u0005\"\u0000\u0000\u0252\u0256\u0001\u0000\u0000\u0000\u0253"+ + "\u0255\b\u0001\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0255\u0258"+ + "\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0256\u0254"+ + "\u0001\u0000\u0000\u0000\u0257\u0259\u0001\u0000\u0000\u0000\u0258\u0256"+ + "\u0001\u0000\u0000\u0000\u0259\u025a\u0005\"\u0000\u0000\u025a\u025b\u0005"+ + "\"\u0000\u0000\u025b\u025c\u0005\"\u0000\u0000\u025c\u025e\u0001\u0000"+ + "\u0000\u0000\u025d\u025f\u0005\"\u0000\u0000\u025e\u025d\u0001\u0000\u0000"+ + 
"\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0261\u0001\u0000\u0000"+ + "\u0000\u0260\u0262\u0005\"\u0000\u0000\u0261\u0260\u0001\u0000\u0000\u0000"+ + "\u0261\u0262\u0001\u0000\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ + "\u0263\u0246\u0001\u0000\u0000\u0000\u0263\u024f\u0001\u0000\u0000\u0000"+ + "\u0264Y\u0001\u0000\u0000\u0000\u0265\u0267\u0003D\u001c\u0000\u0266\u0265"+ + "\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0266"+ + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269[\u0001"+ + "\u0000\u0000\u0000\u026a\u026c\u0003D\u001c\u0000\u026b\u026a\u0001\u0000"+ + "\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026b\u0001\u0000"+ + "\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000"+ + "\u0000\u0000\u026f\u0273\u0003j/\u0000\u0270\u0272\u0003D\u001c\u0000"+ + "\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001\u0000\u0000\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ + "\u0274\u0295\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000"+ + "\u0276\u0278\u0003j/\u0000\u0277\u0279\u0003D\u001c\u0000\u0278\u0277"+ + "\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u0278"+ + "\u0001\u0000\u0000\u0000\u027a\u027b\u0001\u0000\u0000\u0000\u027b\u0295"+ + "\u0001\u0000\u0000\u0000\u027c\u027e\u0003D\u001c\u0000\u027d\u027c\u0001"+ + "\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u027d\u0001"+ + "\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0288\u0001"+ + "\u0000\u0000\u0000\u0281\u0285\u0003j/\u0000\u0282\u0284\u0003D\u001c"+ + "\u0000\u0283\u0282\u0001\u0000\u0000\u0000\u0284\u0287\u0001\u0000\u0000"+ + "\u0000\u0285\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000"+ + "\u0000\u0286\u0289\u0001\u0000\u0000\u0000\u0287\u0285\u0001\u0000\u0000"+ + "\u0000\u0288\u0281\u0001\u0000\u0000\u0000\u0288\u0289\u0001\u0000\u0000"+ + 
"\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0003L \u0000\u028b"+ + "\u0295\u0001\u0000\u0000\u0000\u028c\u028e\u0003j/\u0000\u028d\u028f\u0003"+ + "D\u001c\u0000\u028e\u028d\u0001\u0000\u0000\u0000\u028f\u0290\u0001\u0000"+ + "\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000"+ + "\u0000\u0000\u0291\u0292\u0001\u0000\u0000\u0000\u0292\u0293\u0003L \u0000"+ + "\u0293\u0295\u0001\u0000\u0000\u0000\u0294\u026b\u0001\u0000\u0000\u0000"+ + "\u0294\u0276\u0001\u0000\u0000\u0000\u0294\u027d\u0001\u0000\u0000\u0000"+ + "\u0294\u028c\u0001\u0000\u0000\u0000\u0295]\u0001\u0000\u0000\u0000\u0296"+ + "\u0297\u0005b\u0000\u0000\u0297\u0298\u0005y\u0000\u0000\u0298_\u0001"+ + "\u0000\u0000\u0000\u0299\u029a\u0005a\u0000\u0000\u029a\u029b\u0005n\u0000"+ + "\u0000\u029b\u029c\u0005d\u0000\u0000\u029ca\u0001\u0000\u0000\u0000\u029d"+ + "\u029e\u0005a\u0000\u0000\u029e\u029f\u0005s\u0000\u0000\u029f\u02a0\u0005"+ + "c\u0000\u0000\u02a0c\u0001\u0000\u0000\u0000\u02a1\u02a2\u0005=\u0000"+ + "\u0000\u02a2e\u0001\u0000\u0000\u0000\u02a3\u02a4\u0005,\u0000\u0000\u02a4"+ + "g\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005d\u0000\u0000\u02a6\u02a7\u0005"+ + "e\u0000\u0000\u02a7\u02a8\u0005s\u0000\u0000\u02a8\u02a9\u0005c\u0000"+ + "\u0000\u02a9i\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005.\u0000\u0000\u02ab"+ + "k\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005f\u0000\u0000\u02ad\u02ae\u0005"+ + "a\u0000\u0000\u02ae\u02af\u0005l\u0000\u0000\u02af\u02b0\u0005s\u0000"+ + "\u0000\u02b0\u02b1\u0005e\u0000\u0000\u02b1m\u0001\u0000\u0000\u0000\u02b2"+ + "\u02b3\u0005f\u0000\u0000\u02b3\u02b4\u0005i\u0000\u0000\u02b4\u02b5\u0005"+ + "r\u0000\u0000\u02b5\u02b6\u0005s\u0000\u0000\u02b6\u02b7\u0005t\u0000"+ + "\u0000\u02b7o\u0001\u0000\u0000\u0000\u02b8\u02b9\u0005l\u0000\u0000\u02b9"+ + "\u02ba\u0005a\u0000\u0000\u02ba\u02bb\u0005s\u0000\u0000\u02bb\u02bc\u0005"+ + "t\u0000\u0000\u02bcq\u0001\u0000\u0000\u0000\u02bd\u02be\u0005(\u0000"+ + 
"\u0000\u02bes\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005i\u0000\u0000\u02c0"+ + "\u02c1\u0005n\u0000\u0000\u02c1u\u0001\u0000\u0000\u0000\u02c2\u02c3\u0005"+ + "i\u0000\u0000\u02c3\u02c4\u0005s\u0000\u0000\u02c4w\u0001\u0000\u0000"+ + "\u0000\u02c5\u02c6\u0005l\u0000\u0000\u02c6\u02c7\u0005i\u0000\u0000\u02c7"+ + "\u02c8\u0005k\u0000\u0000\u02c8\u02c9\u0005e\u0000\u0000\u02c9y\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cb\u0005n\u0000\u0000\u02cb\u02cc\u0005o\u0000"+ + "\u0000\u02cc\u02cd\u0005t\u0000\u0000\u02cd{\u0001\u0000\u0000\u0000\u02ce"+ + "\u02cf\u0005n\u0000\u0000\u02cf\u02d0\u0005u\u0000\u0000\u02d0\u02d1\u0005"+ + "l\u0000\u0000\u02d1\u02d2\u0005l\u0000\u0000\u02d2}\u0001\u0000\u0000"+ + "\u0000\u02d3\u02d4\u0005n\u0000\u0000\u02d4\u02d5\u0005u\u0000\u0000\u02d5"+ + "\u02d6\u0005l\u0000\u0000\u02d6\u02d7\u0005l\u0000\u0000\u02d7\u02d8\u0005"+ + "s\u0000\u0000\u02d8\u007f\u0001\u0000\u0000\u0000\u02d9\u02da\u0005o\u0000"+ + "\u0000\u02da\u02db\u0005r\u0000\u0000\u02db\u0081\u0001\u0000\u0000\u0000"+ + "\u02dc\u02dd\u0005?\u0000\u0000\u02dd\u0083\u0001\u0000\u0000\u0000\u02de"+ + "\u02df\u0005r\u0000\u0000\u02df\u02e0\u0005l\u0000\u0000\u02e0\u02e1\u0005"+ + "i\u0000\u0000\u02e1\u02e2\u0005k\u0000\u0000\u02e2\u02e3\u0005e\u0000"+ + "\u0000\u02e3\u0085\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005)\u0000\u0000"+ + "\u02e5\u0087\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005t\u0000\u0000\u02e7"+ + "\u02e8\u0005r\u0000\u0000\u02e8\u02e9\u0005u\u0000\u0000\u02e9\u02ea\u0005"+ + "e\u0000\u0000\u02ea\u0089\u0001\u0000\u0000\u0000\u02eb\u02ec\u0005=\u0000"+ + "\u0000\u02ec\u02ed\u0005=\u0000\u0000\u02ed\u008b\u0001\u0000\u0000\u0000"+ + "\u02ee\u02ef\u0005=\u0000\u0000\u02ef\u02f0\u0005~\u0000\u0000\u02f0\u008d"+ + "\u0001\u0000\u0000\u0000\u02f1\u02f2\u0005!\u0000\u0000\u02f2\u02f3\u0005"+ + "=\u0000\u0000\u02f3\u008f\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005<\u0000"+ + "\u0000\u02f5\u0091\u0001\u0000\u0000\u0000\u02f6\u02f7\u0005<\u0000\u0000"+ + 
"\u02f7\u02f8\u0005=\u0000\u0000\u02f8\u0093\u0001\u0000\u0000\u0000\u02f9"+ + "\u02fa\u0005>\u0000\u0000\u02fa\u0095\u0001\u0000\u0000\u0000\u02fb\u02fc"+ + "\u0005>\u0000\u0000\u02fc\u02fd\u0005=\u0000\u0000\u02fd\u0097\u0001\u0000"+ + "\u0000\u0000\u02fe\u02ff\u0005+\u0000\u0000\u02ff\u0099\u0001\u0000\u0000"+ + "\u0000\u0300\u0301\u0005-\u0000\u0000\u0301\u009b\u0001\u0000\u0000\u0000"+ + "\u0302\u0303\u0005*\u0000\u0000\u0303\u009d\u0001\u0000\u0000\u0000\u0304"+ + "\u0305\u0005/\u0000\u0000\u0305\u009f\u0001\u0000\u0000\u0000\u0306\u0307"+ + "\u0005%\u0000\u0000\u0307\u00a1\u0001\u0000\u0000\u0000\u0308\u0309\u0005"+ + "[\u0000\u0000\u0309\u030a\u0001\u0000\u0000\u0000\u030a\u030b\u0006K\u0000"+ + "\u0000\u030b\u030c\u0006K\u0000\u0000\u030c\u00a3\u0001\u0000\u0000\u0000"+ + "\u030d\u030e\u0005]\u0000\u0000\u030e\u030f\u0001\u0000\u0000\u0000\u030f"+ + "\u0310\u0006L\r\u0000\u0310\u0311\u0006L\r\u0000\u0311\u00a5\u0001\u0000"+ + "\u0000\u0000\u0312\u0316\u0003F\u001d\u0000\u0313\u0315\u0003V%\u0000"+ + "\u0314\u0313\u0001\u0000\u0000\u0000\u0315\u0318\u0001\u0000\u0000\u0000"+ + "\u0316\u0314\u0001\u0000\u0000\u0000\u0316\u0317\u0001\u0000\u0000\u0000"+ + "\u0317\u0323\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000"+ + "\u0319\u031c\u0003T$\u0000\u031a\u031c\u0003N!\u0000\u031b\u0319\u0001"+ + "\u0000\u0000\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031e\u0001"+ + "\u0000\u0000\u0000\u031d\u031f\u0003V%\u0000\u031e\u031d\u0001\u0000\u0000"+ + "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u031e\u0001\u0000\u0000"+ + "\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0323\u0001\u0000\u0000"+ + "\u0000\u0322\u0312\u0001\u0000\u0000\u0000\u0322\u031b\u0001\u0000\u0000"+ + "\u0000\u0323\u00a7\u0001\u0000\u0000\u0000\u0324\u0326\u0003P\"\u0000"+ + "\u0325\u0327\u0003R#\u0000\u0326\u0325\u0001\u0000\u0000\u0000\u0327\u0328"+ + "\u0001\u0000\u0000\u0000\u0328\u0326\u0001\u0000\u0000\u0000\u0328\u0329"+ + 
"\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000\u0000\u032a\u032b"+ + "\u0003P\"\u0000\u032b\u00a9\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "\u00a8N\u0000\u032d\u00ab\u0001\u0000\u0000\u0000\u032e\u032f\u00032\u0013"+ + "\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0006P\t\u0000"+ + "\u0331\u00ad\u0001\u0000\u0000\u0000\u0332\u0333\u00034\u0014\u0000\u0333"+ + "\u0334\u0001\u0000\u0000\u0000\u0334\u0335\u0006Q\t\u0000\u0335\u00af"+ + "\u0001\u0000\u0000\u0000\u0336\u0337\u00036\u0015\u0000\u0337\u0338\u0001"+ + "\u0000\u0000\u0000\u0338\u0339\u0006R\t\u0000\u0339\u00b1\u0001\u0000"+ + "\u0000\u0000\u033a\u033b\u0003B\u001b\u0000\u033b\u033c\u0001\u0000\u0000"+ + "\u0000\u033c\u033d\u0006S\f\u0000\u033d\u033e\u0006S\r\u0000\u033e\u00b3"+ + "\u0001\u0000\u0000\u0000\u033f\u0340\u0003\u00a2K\u0000\u0340\u0341\u0001"+ + "\u0000\u0000\u0000\u0341\u0342\u0006T\n\u0000\u0342\u00b5\u0001\u0000"+ + "\u0000\u0000\u0343\u0344\u0003\u00a4L\u0000\u0344\u0345\u0001\u0000\u0000"+ + "\u0000\u0345\u0346\u0006U\u000e\u0000\u0346\u00b7\u0001\u0000\u0000\u0000"+ + "\u0347\u0348\u0003f-\u0000\u0348\u0349\u0001\u0000\u0000\u0000\u0349\u034a"+ + "\u0006V\u000f\u0000\u034a\u00b9\u0001\u0000\u0000\u0000\u034b\u034c\u0003"+ + "d,\u0000\u034c\u034d\u0001\u0000\u0000\u0000\u034d\u034e\u0006W\u0010"+ + "\u0000\u034e\u00bb\u0001\u0000\u0000\u0000\u034f\u0350\u0005m\u0000\u0000"+ + "\u0350\u0351\u0005e\u0000\u0000\u0351\u0352\u0005t\u0000\u0000\u0352\u0353"+ + "\u0005a\u0000\u0000\u0353\u0354\u0005d\u0000\u0000\u0354\u0355\u0005a"+ + "\u0000\u0000\u0355\u0356\u0005t\u0000\u0000\u0356\u0357\u0005a\u0000\u0000"+ + "\u0357\u00bd\u0001\u0000\u0000\u0000\u0358\u035c\b\n\u0000\u0000\u0359"+ + "\u035a\u0005/\u0000\u0000\u035a\u035c\b\u000b\u0000\u0000\u035b\u0358"+ + "\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000\u0000\u035c\u00bf"+ + "\u0001\u0000\u0000\u0000\u035d\u035f\u0003\u00beY\u0000\u035e\u035d\u0001"+ + 
"\u0000\u0000\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u035e\u0001"+ + "\u0000\u0000\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u00c1\u0001"+ + "\u0000\u0000\u0000\u0362\u0363\u0003\u00aaO\u0000\u0363\u0364\u0001\u0000"+ + "\u0000\u0000\u0364\u0365\u0006[\u0011\u0000\u0365\u00c3\u0001\u0000\u0000"+ + "\u0000\u0366\u0367\u00032\u0013\u0000\u0367\u0368\u0001\u0000\u0000\u0000"+ + "\u0368\u0369\u0006\\\t\u0000\u0369\u00c5\u0001\u0000\u0000\u0000\u036a"+ + "\u036b\u00034\u0014\u0000\u036b\u036c\u0001\u0000\u0000\u0000\u036c\u036d"+ + "\u0006]\t\u0000\u036d\u00c7\u0001\u0000\u0000\u0000\u036e\u036f\u0003"+ + "6\u0015\u0000\u036f\u0370\u0001\u0000\u0000\u0000\u0370\u0371\u0006^\t"+ + "\u0000\u0371\u00c9\u0001\u0000\u0000\u0000\u0372\u0373\u0003B\u001b\u0000"+ + "\u0373\u0374\u0001\u0000\u0000\u0000\u0374\u0375\u0006_\f\u0000\u0375"+ + "\u0376\u0006_\r\u0000\u0376\u00cb\u0001\u0000\u0000\u0000\u0377\u0378"+ + "\u0003j/\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a\u0006`"+ + "\u0012\u0000\u037a\u00cd\u0001\u0000\u0000\u0000\u037b\u037c\u0003f-\u0000"+ + "\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006a\u000f\u0000\u037e"+ + "\u00cf\u0001\u0000\u0000\u0000\u037f\u0384\u0003F\u001d\u0000\u0380\u0384"+ + "\u0003D\u001c\u0000\u0381\u0384\u0003T$\u0000\u0382\u0384\u0003\u009c"+ + "H\u0000\u0383\u037f\u0001\u0000\u0000\u0000\u0383\u0380\u0001\u0000\u0000"+ + "\u0000\u0383\u0381\u0001\u0000\u0000\u0000\u0383\u0382\u0001\u0000\u0000"+ + "\u0000\u0384\u00d1\u0001\u0000\u0000\u0000\u0385\u0388\u0003F\u001d\u0000"+ + "\u0386\u0388\u0003\u009cH\u0000\u0387\u0385\u0001\u0000\u0000\u0000\u0387"+ + "\u0386\u0001\u0000\u0000\u0000\u0388\u038c\u0001\u0000\u0000\u0000\u0389"+ + "\u038b\u0003\u00d0b\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038e"+ "\u0001\u0000\u0000\u0000\u038c\u038a\u0001\u0000\u0000\u0000\u038c\u038d"+ - "\u0001\u0000\u0000\u0000\u038d\u00d2\u0001\u0000\u0000\u0000\u038e\u038f"+ - 
"\u0003/\u0012\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006"+ - "d\b\u0000\u0391\u00d4\u0001\u0000\u0000\u0000\u0392\u0393\u00031\u0013"+ - "\u0000\u0393\u0394\u0001\u0000\u0000\u0000\u0394\u0395\u0006e\b\u0000"+ - "\u0395\u00d6\u0001\u0000\u0000\u0000\u0396\u0397\u00033\u0014\u0000\u0397"+ - "\u0398\u0001\u0000\u0000\u0000\u0398\u0399\u0006f\b\u0000\u0399\u00d8"+ - "\u0001\u0000\u0000\u0000\u039a\u039b\u0003?\u001a\u0000\u039b\u039c\u0001"+ - "\u0000\u0000\u0000\u039c\u039d\u0006g\u000b\u0000\u039d\u039e\u0006g\f"+ - "\u0000\u039e\u00da\u0001\u0000\u0000\u0000\u039f\u03a0\u0003a+\u0000\u03a0"+ - "\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006h\u000f\u0000\u03a2\u00dc"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a4\u0003c,\u0000\u03a4\u03a5\u0001\u0000"+ - "\u0000\u0000\u03a5\u03a6\u0006i\u000e\u0000\u03a6\u00de\u0001\u0000\u0000"+ - "\u0000\u03a7\u03a8\u0003g.\u0000\u03a8\u03a9\u0001\u0000\u0000\u0000\u03a9"+ - "\u03aa\u0006j\u0011\u0000\u03aa\u00e0\u0001\u0000\u0000\u0000\u03ab\u03ac"+ - "\u0005a\u0000\u0000\u03ac\u03ad\u0005s\u0000\u0000\u03ad\u00e2\u0001\u0000"+ - "\u0000\u0000\u03ae\u03af\u0003\u00d1c\u0000\u03af\u03b0\u0001\u0000\u0000"+ - "\u0000\u03b0\u03b1\u0006l\u0012\u0000\u03b1\u00e4\u0001\u0000\u0000\u0000"+ - "\u03b2\u03b3\u0003/\u0012\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4"+ - "\u03b5\u0006m\b\u0000\u03b5\u00e6\u0001\u0000\u0000\u0000\u03b6\u03b7"+ - "\u00031\u0013\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006"+ - "n\b\u0000\u03b9\u00e8\u0001\u0000\u0000\u0000\u03ba\u03bb\u00033\u0014"+ - "\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006o\b\u0000"+ - "\u03bd\u00ea\u0001\u0000\u0000\u0000\u03be\u03bf\u0003?\u001a\u0000\u03bf"+ - "\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006p\u000b\u0000\u03c1\u03c2"+ - "\u0006p\f\u0000\u03c2\u00ec\u0001\u0000\u0000\u0000\u03c3\u03c4\u0003"+ - "\u009fJ\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006q\t"+ - 
"\u0000\u03c6\u03c7\u0006q\u0013\u0000\u03c7\u00ee\u0001\u0000\u0000\u0000"+ - "\u03c8\u03c9\u0005o\u0000\u0000\u03c9\u03ca\u0005n\u0000\u0000\u03ca\u03cb"+ - "\u0001\u0000\u0000\u0000\u03cb\u03cc\u0006r\u0014\u0000\u03cc\u00f0\u0001"+ - "\u0000\u0000\u0000\u03cd\u03ce\u0005w\u0000\u0000\u03ce\u03cf\u0005i\u0000"+ - "\u0000\u03cf\u03d0\u0005t\u0000\u0000\u03d0\u03d1\u0005h\u0000\u0000\u03d1"+ - "\u03d2\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006s\u0014\u0000\u03d3\u00f2"+ - "\u0001\u0000\u0000\u0000\u03d4\u03d5\b\f\u0000\u0000\u03d5\u00f4\u0001"+ - "\u0000\u0000\u0000\u03d6\u03d8\u0003\u00f3t\u0000\u03d7\u03d6\u0001\u0000"+ - "\u0000\u0000\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9\u03d7\u0001\u0000"+ - "\u0000\u0000\u03d9\u03da\u0001\u0000\u0000\u0000\u03da\u03db\u0001\u0000"+ - "\u0000\u0000\u03db\u03dc\u0003\u0131\u0093\u0000\u03dc\u03de\u0001\u0000"+ - "\u0000\u0000\u03dd\u03d7\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000"+ - "\u0000\u0000\u03de\u03e0\u0001\u0000\u0000\u0000\u03df\u03e1\u0003\u00f3"+ - "t\u0000\u03e0\u03df\u0001\u0000\u0000\u0000\u03e1\u03e2\u0001\u0000\u0000"+ - "\u0000\u03e2\u03e0\u0001\u0000\u0000\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - "\u0000\u03e3\u00f6\u0001\u0000\u0000\u0000\u03e4\u03e5\u0003\u00a7N\u0000"+ - "\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6\u03e7\u0006v\u0010\u0000\u03e7"+ - "\u00f8\u0001\u0000\u0000\u0000\u03e8\u03e9\u0003\u00f5u\u0000\u03e9\u03ea"+ - "\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006w\u0015\u0000\u03eb\u00fa\u0001"+ - "\u0000\u0000\u0000\u03ec\u03ed\u0003/\u0012\u0000\u03ed\u03ee\u0001\u0000"+ - "\u0000\u0000\u03ee\u03ef\u0006x\b\u0000\u03ef\u00fc\u0001\u0000\u0000"+ - "\u0000\u03f0\u03f1\u00031\u0013\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000"+ - "\u03f2\u03f3\u0006y\b\u0000\u03f3\u00fe\u0001\u0000\u0000\u0000\u03f4"+ - "\u03f5\u00033\u0014\u0000\u03f5\u03f6\u0001\u0000\u0000\u0000\u03f6\u03f7"+ - "\u0006z\b\u0000\u03f7\u0100\u0001\u0000\u0000\u0000\u03f8\u03f9\u0003"+ - 
"?\u001a\u0000\u03f9\u03fa\u0001\u0000\u0000\u0000\u03fa\u03fb\u0006{\u000b"+ - "\u0000\u03fb\u03fc\u0006{\f\u0000\u03fc\u03fd\u0006{\f\u0000\u03fd\u0102"+ - "\u0001\u0000\u0000\u0000\u03fe\u03ff\u0003a+\u0000\u03ff\u0400\u0001\u0000"+ - "\u0000\u0000\u0400\u0401\u0006|\u000f\u0000\u0401\u0104\u0001\u0000\u0000"+ - "\u0000\u0402\u0403\u0003c,\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ - "\u0405\u0006}\u000e\u0000\u0405\u0106\u0001\u0000\u0000\u0000\u0406\u0407"+ - "\u0003g.\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006~"+ - "\u0011\u0000\u0409\u0108\u0001\u0000\u0000\u0000\u040a\u040b\u0003\u00f1"+ - "s\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\u007f\u0016"+ - "\u0000\u040d\u010a\u0001\u0000\u0000\u0000\u040e\u040f\u0003\u00d1c\u0000"+ - "\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006\u0080\u0012\u0000"+ - "\u0411\u010c\u0001\u0000\u0000\u0000\u0412\u0413\u0003\u00a7N\u0000\u0413"+ - "\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006\u0081\u0010\u0000\u0415"+ - "\u010e\u0001\u0000\u0000\u0000\u0416\u0417\u0003/\u0012\u0000\u0417\u0418"+ - "\u0001\u0000\u0000\u0000\u0418\u0419\u0006\u0082\b\u0000\u0419\u0110\u0001"+ - "\u0000\u0000\u0000\u041a\u041b\u00031\u0013\u0000\u041b\u041c\u0001\u0000"+ - "\u0000\u0000\u041c\u041d\u0006\u0083\b\u0000\u041d\u0112\u0001\u0000\u0000"+ - "\u0000\u041e\u041f\u00033\u0014\u0000\u041f\u0420\u0001\u0000\u0000\u0000"+ - "\u0420\u0421\u0006\u0084\b\u0000\u0421\u0114\u0001\u0000\u0000\u0000\u0422"+ - "\u0423\u0003?\u001a\u0000\u0423\u0424\u0001\u0000\u0000\u0000\u0424\u0425"+ - "\u0006\u0085\u000b\u0000\u0425\u0426\u0006\u0085\f\u0000\u0426\u0116\u0001"+ - "\u0000\u0000\u0000\u0427\u0428\u0003g.\u0000\u0428\u0429\u0001\u0000\u0000"+ - "\u0000\u0429\u042a\u0006\u0086\u0011\u0000\u042a\u0118\u0001\u0000\u0000"+ - "\u0000\u042b\u042c\u0003\u00a7N\u0000\u042c\u042d\u0001\u0000\u0000\u0000"+ - "\u042d\u042e\u0006\u0087\u0010\u0000\u042e\u011a\u0001\u0000\u0000\u0000"+ - 
"\u042f\u0430\u0003\u00a3L\u0000\u0430\u0431\u0001\u0000\u0000\u0000\u0431"+ - "\u0432\u0006\u0088\u0017\u0000\u0432\u011c\u0001\u0000\u0000\u0000\u0433"+ - "\u0434\u0003/\u0012\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435\u0436"+ - "\u0006\u0089\b\u0000\u0436\u011e\u0001\u0000\u0000\u0000\u0437\u0438\u0003"+ - "1\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006\u008a"+ - "\b\u0000\u043a\u0120\u0001\u0000\u0000\u0000\u043b\u043c\u00033\u0014"+ - "\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006\u008b\b\u0000"+ - "\u043e\u0122\u0001\u0000\u0000\u0000\u043f\u0440\u0003?\u001a\u0000\u0440"+ - "\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u008c\u000b\u0000\u0442"+ - "\u0443\u0006\u008c\f\u0000\u0443\u0124\u0001\u0000\u0000\u0000\u0444\u0445"+ - "\u0005i\u0000\u0000\u0445\u0446\u0005n\u0000\u0000\u0446\u0447\u0005f"+ - "\u0000\u0000\u0447\u0448\u0005o\u0000\u0000\u0448\u0126\u0001\u0000\u0000"+ - "\u0000\u0449\u044a\u0005f\u0000\u0000\u044a\u044b\u0005u\u0000\u0000\u044b"+ - "\u044c\u0005n\u0000\u0000\u044c\u044d\u0005c\u0000\u0000\u044d\u044e\u0005"+ - "t\u0000\u0000\u044e\u044f\u0005i\u0000\u0000\u044f\u0450\u0005o\u0000"+ - "\u0000\u0450\u0451\u0005n\u0000\u0000\u0451\u0452\u0005s\u0000\u0000\u0452"+ - "\u0128\u0001\u0000\u0000\u0000\u0453\u0454\u0003/\u0012\u0000\u0454\u0455"+ - "\u0001\u0000\u0000\u0000\u0455\u0456\u0006\u008f\b\u0000\u0456\u012a\u0001"+ - "\u0000\u0000\u0000\u0457\u0458\u00031\u0013\u0000\u0458\u0459\u0001\u0000"+ - "\u0000\u0000\u0459\u045a\u0006\u0090\b\u0000\u045a\u012c\u0001\u0000\u0000"+ - "\u0000\u045b\u045c\u00033\u0014\u0000\u045c\u045d\u0001\u0000\u0000\u0000"+ - "\u045d\u045e\u0006\u0091\b\u0000\u045e\u012e\u0001\u0000\u0000\u0000\u045f"+ - "\u0460\u0003\u00a1K\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462"+ - "\u0006\u0092\r\u0000\u0462\u0463\u0006\u0092\f\u0000\u0463\u0130\u0001"+ - "\u0000\u0000\u0000\u0464\u0465\u0005:\u0000\u0000\u0465\u0132\u0001\u0000"+ - "\u0000\u0000\u0466\u046c\u0003K 
\u0000\u0467\u046c\u0003A\u001b\u0000"+ - "\u0468\u046c\u0003g.\u0000\u0469\u046c\u0003C\u001c\u0000\u046a\u046c"+ - "\u0003Q#\u0000\u046b\u0466\u0001\u0000\u0000\u0000\u046b\u0467\u0001\u0000"+ - "\u0000\u0000\u046b\u0468\u0001\u0000\u0000\u0000\u046b\u0469\u0001\u0000"+ - "\u0000\u0000\u046b\u046a\u0001\u0000\u0000\u0000\u046c\u046d\u0001\u0000"+ - "\u0000\u0000\u046d\u046b\u0001\u0000\u0000\u0000\u046d\u046e\u0001\u0000"+ - "\u0000\u0000\u046e\u0134\u0001\u0000\u0000\u0000\u046f\u0470\u0003/\u0012"+ - "\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006\u0095\b\u0000"+ - "\u0472\u0136\u0001\u0000\u0000\u0000\u0473\u0474\u00031\u0013\u0000\u0474"+ - "\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006\u0096\b\u0000\u0476\u0138"+ - "\u0001\u0000\u0000\u0000\u0477\u0478\u00033\u0014\u0000\u0478\u0479\u0001"+ - "\u0000\u0000\u0000\u0479\u047a\u0006\u0097\b\u0000\u047a\u013a\u0001\u0000"+ - "\u0000\u00009\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u01cd"+ - "\u01d7\u01db\u01de\u01e7\u01e9\u01f4\u021d\u0222\u022b\u0232\u0237\u0239"+ - "\u0244\u024c\u024f\u0251\u0256\u025b\u0261\u0268\u026d\u0273\u0276\u027e"+ - "\u0282\u0304\u0309\u030e\u0310\u0316\u0349\u034e\u0371\u0375\u037a\u037f"+ - "\u0384\u0386\u038a\u038c\u03d9\u03dd\u03e2\u046b\u046d\u0018\u0005\u0002"+ - "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003"+ - "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000"+ - "\u0007?\u0000\u0005\u0000\u0000\u0007\u0019\u0000\u0004\u0000\u0000\u0007"+ - "@\u0000\u0007!\u0000\u0007 \u0000\u0007B\u0000\u0007#\u0000\u0007K\u0000"+ - "\u0005\n\u0000\u0005\u0007\u0000\u0007U\u0000\u0007T\u0000\u0007A\u0000"; + "\u0001\u0000\u0000\u0000\u038d\u0399\u0001\u0000\u0000\u0000\u038e\u038c"+ + "\u0001\u0000\u0000\u0000\u038f\u0392\u0003T$\u0000\u0390\u0392\u0003N"+ + "!\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391\u0390\u0001\u0000\u0000"+ + "\u0000\u0392\u0394\u0001\u0000\u0000\u0000\u0393\u0395\u0003\u00d0b\u0000"+ + 
"\u0394\u0393\u0001\u0000\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000"+ + "\u0396\u0394\u0001\u0000\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000"+ + "\u0397\u0399\u0001\u0000\u0000\u0000\u0398\u0387\u0001\u0000\u0000\u0000"+ + "\u0398\u0391\u0001\u0000\u0000\u0000\u0399\u00d3\u0001\u0000\u0000\u0000"+ + "\u039a\u039d\u0003\u00d2c\u0000\u039b\u039d\u0003\u00a8N\u0000\u039c\u039a"+ + "\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d\u039e"+ + "\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000\u0000\u0000\u039e\u039f"+ + "\u0001\u0000\u0000\u0000\u039f\u00d5\u0001\u0000\u0000\u0000\u03a0\u03a1"+ + "\u00032\u0013\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0006"+ + "e\t\u0000\u03a3\u00d7\u0001\u0000\u0000\u0000\u03a4\u03a5\u00034\u0014"+ + "\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006f\t\u0000"+ + "\u03a7\u00d9\u0001\u0000\u0000\u0000\u03a8\u03a9\u00036\u0015\u0000\u03a9"+ + "\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006g\t\u0000\u03ab\u00db"+ + "\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003B\u001b\u0000\u03ad\u03ae\u0001"+ + "\u0000\u0000\u0000\u03ae\u03af\u0006h\f\u0000\u03af\u03b0\u0006h\r\u0000"+ + "\u03b0\u00dd\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003d,\u0000\u03b2\u03b3"+ + "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006i\u0010\u0000\u03b4\u00df\u0001"+ + "\u0000\u0000\u0000\u03b5\u03b6\u0003f-\u0000\u03b6\u03b7\u0001\u0000\u0000"+ + "\u0000\u03b7\u03b8\u0006j\u000f\u0000\u03b8\u00e1\u0001\u0000\u0000\u0000"+ + "\u03b9\u03ba\u0003j/\u0000\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc"+ + "\u0006k\u0012\u0000\u03bc\u00e3\u0001\u0000\u0000\u0000\u03bd\u03be\u0005"+ + "a\u0000\u0000\u03be\u03bf\u0005s\u0000\u0000\u03bf\u00e5\u0001\u0000\u0000"+ + "\u0000\u03c0\u03c1\u0003\u00d4d\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000"+ + "\u03c2\u03c3\u0006m\u0013\u0000\u03c3\u00e7\u0001\u0000\u0000\u0000\u03c4"+ + "\u03c5\u00032\u0013\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7"+ + 
"\u0006n\t\u0000\u03c7\u00e9\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003"+ + "4\u0014\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006o\t"+ + "\u0000\u03cb\u00eb\u0001\u0000\u0000\u0000\u03cc\u03cd\u00036\u0015\u0000"+ + "\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf\u0006p\t\u0000\u03cf"+ + "\u00ed\u0001\u0000\u0000\u0000\u03d0\u03d1\u0003B\u001b\u0000\u03d1\u03d2"+ + "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006q\f\u0000\u03d3\u03d4\u0006"+ + "q\r\u0000\u03d4\u00ef\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00a2"+ + "K\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006r\n\u0000"+ + "\u03d8\u03d9\u0006r\u0014\u0000\u03d9\u00f1\u0001\u0000\u0000\u0000\u03da"+ + "\u03db\u0005o\u0000\u0000\u03db\u03dc\u0005n\u0000\u0000\u03dc\u03dd\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0006s\u0015\u0000\u03de\u00f3\u0001\u0000"+ + "\u0000\u0000\u03df\u03e0\u0005w\u0000\u0000\u03e0\u03e1\u0005i\u0000\u0000"+ + "\u03e1\u03e2\u0005t\u0000\u0000\u03e2\u03e3\u0005h\u0000\u0000\u03e3\u03e4"+ + "\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006t\u0015\u0000\u03e5\u00f5\u0001"+ + "\u0000\u0000\u0000\u03e6\u03e7\b\f\u0000\u0000\u03e7\u00f7\u0001\u0000"+ + "\u0000\u0000\u03e8\u03ea\u0003\u00f6u\u0000\u03e9\u03e8\u0001\u0000\u0000"+ + "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03e9\u0001\u0000\u0000"+ + "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0001\u0000\u0000"+ + "\u0000\u03ed\u03ee\u0003\u013c\u0098\u0000\u03ee\u03f0\u0001\u0000\u0000"+ + "\u0000\u03ef\u03e9\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000"+ + "\u0000\u03f0\u03f2\u0001\u0000\u0000\u0000\u03f1\u03f3\u0003\u00f6u\u0000"+ + "\u03f2\u03f1\u0001\u0000\u0000\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000"+ + "\u03f4\u03f2\u0001\u0000\u0000\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000"+ + "\u03f5\u00f9\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003\u00aaO\u0000\u03f7"+ + "\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006w\u0011\u0000\u03f9\u00fb"+ + 
"\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003\u00f8v\u0000\u03fb\u03fc\u0001"+ + "\u0000\u0000\u0000\u03fc\u03fd\u0006x\u0016\u0000\u03fd\u00fd\u0001\u0000"+ + "\u0000\u0000\u03fe\u03ff\u00032\u0013\u0000\u03ff\u0400\u0001\u0000\u0000"+ + "\u0000\u0400\u0401\u0006y\t\u0000\u0401\u00ff\u0001\u0000\u0000\u0000"+ + "\u0402\u0403\u00034\u0014\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ + "\u0405\u0006z\t\u0000\u0405\u0101\u0001\u0000\u0000\u0000\u0406\u0407"+ + "\u00036\u0015\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006"+ + "{\t\u0000\u0409\u0103\u0001\u0000\u0000\u0000\u040a\u040b\u0003B\u001b"+ + "\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006|\f\u0000"+ + "\u040d\u040e\u0006|\r\u0000\u040e\u040f\u0006|\r\u0000\u040f\u0105\u0001"+ + "\u0000\u0000\u0000\u0410\u0411\u0003d,\u0000\u0411\u0412\u0001\u0000\u0000"+ + "\u0000\u0412\u0413\u0006}\u0010\u0000\u0413\u0107\u0001\u0000\u0000\u0000"+ + "\u0414\u0415\u0003f-\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417"+ + "\u0006~\u000f\u0000\u0417\u0109\u0001\u0000\u0000\u0000\u0418\u0419\u0003"+ + "j/\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u007f\u0012"+ + "\u0000\u041b\u010b\u0001\u0000\u0000\u0000\u041c\u041d\u0003\u00f4t\u0000"+ + "\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041f\u0006\u0080\u0017\u0000"+ + "\u041f\u010d\u0001\u0000\u0000\u0000\u0420\u0421\u0003\u00d4d\u0000\u0421"+ + "\u0422\u0001\u0000\u0000\u0000\u0422\u0423\u0006\u0081\u0013\u0000\u0423"+ + "\u010f\u0001\u0000\u0000\u0000\u0424\u0425\u0003\u00aaO\u0000\u0425\u0426"+ + "\u0001\u0000\u0000\u0000\u0426\u0427\u0006\u0082\u0011\u0000\u0427\u0111"+ + "\u0001\u0000\u0000\u0000\u0428\u0429\u00032\u0013\u0000\u0429\u042a\u0001"+ + "\u0000\u0000\u0000\u042a\u042b\u0006\u0083\t\u0000\u042b\u0113\u0001\u0000"+ + "\u0000\u0000\u042c\u042d\u00034\u0014\u0000\u042d\u042e\u0001\u0000\u0000"+ + "\u0000\u042e\u042f\u0006\u0084\t\u0000\u042f\u0115\u0001\u0000\u0000\u0000"+ + 
"\u0430\u0431\u00036\u0015\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432"+ + "\u0433\u0006\u0085\t\u0000\u0433\u0117\u0001\u0000\u0000\u0000\u0434\u0435"+ + "\u0003B\u001b\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ + "\u0086\f\u0000\u0437\u0438\u0006\u0086\r\u0000\u0438\u0119\u0001\u0000"+ + "\u0000\u0000\u0439\u043a\u0003j/\u0000\u043a\u043b\u0001\u0000\u0000\u0000"+ + "\u043b\u043c\u0006\u0087\u0012\u0000\u043c\u011b\u0001\u0000\u0000\u0000"+ + "\u043d\u043e\u0003\u00aaO\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f"+ + "\u0440\u0006\u0088\u0011\u0000\u0440\u011d\u0001\u0000\u0000\u0000\u0441"+ + "\u0442\u0003\u00a6M\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444"+ + "\u0006\u0089\u0018\u0000\u0444\u011f\u0001\u0000\u0000\u0000\u0445\u0446"+ + "\u00032\u0013\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ + "\u008a\t\u0000\u0448\u0121\u0001\u0000\u0000\u0000\u0449\u044a\u00034"+ + "\u0014\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u008b"+ + "\t\u0000\u044c\u0123\u0001\u0000\u0000\u0000\u044d\u044e\u00036\u0015"+ + "\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450\u0006\u008c\t\u0000"+ + "\u0450\u0125\u0001\u0000\u0000\u0000\u0451\u0452\u0003B\u001b\u0000\u0452"+ + "\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u008d\f\u0000\u0454\u0455"+ + "\u0006\u008d\r\u0000\u0455\u0127\u0001\u0000\u0000\u0000\u0456\u0457\u0005"+ + "i\u0000\u0000\u0457\u0458\u0005n\u0000\u0000\u0458\u0459\u0005f\u0000"+ + "\u0000\u0459\u045a\u0005o\u0000\u0000\u045a\u0129\u0001\u0000\u0000\u0000"+ + "\u045b\u045c\u00032\u0013\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d"+ + "\u045e\u0006\u008f\t\u0000\u045e\u012b\u0001\u0000\u0000\u0000\u045f\u0460"+ + "\u00034\u0014\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462\u0006"+ + "\u0090\t\u0000\u0462\u012d\u0001\u0000\u0000\u0000\u0463\u0464\u00036"+ + "\u0015\u0000\u0464\u0465\u0001\u0000\u0000\u0000\u0465\u0466\u0006\u0091"+ + 
"\t\u0000\u0466\u012f\u0001\u0000\u0000\u0000\u0467\u0468\u0003B\u001b"+ + "\u0000\u0468\u0469\u0001\u0000\u0000\u0000\u0469\u046a\u0006\u0092\f\u0000"+ + "\u046a\u046b\u0006\u0092\r\u0000\u046b\u0131\u0001\u0000\u0000\u0000\u046c"+ + "\u046d\u0005f\u0000\u0000\u046d\u046e\u0005u\u0000\u0000\u046e\u046f\u0005"+ + "n\u0000\u0000\u046f\u0470\u0005c\u0000\u0000\u0470\u0471\u0005t\u0000"+ + "\u0000\u0471\u0472\u0005i\u0000\u0000\u0472\u0473\u0005o\u0000\u0000\u0473"+ + "\u0474\u0005n\u0000\u0000\u0474\u0475\u0005s\u0000\u0000\u0475\u0133\u0001"+ + "\u0000\u0000\u0000\u0476\u0477\u00032\u0013\u0000\u0477\u0478\u0001\u0000"+ + "\u0000\u0000\u0478\u0479\u0006\u0094\t\u0000\u0479\u0135\u0001\u0000\u0000"+ + "\u0000\u047a\u047b\u00034\u0014\u0000\u047b\u047c\u0001\u0000\u0000\u0000"+ + "\u047c\u047d\u0006\u0095\t\u0000\u047d\u0137\u0001\u0000\u0000\u0000\u047e"+ + "\u047f\u00036\u0015\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481"+ + "\u0006\u0096\t\u0000\u0481\u0139\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ + "\u00a4L\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u0097"+ + "\u000e\u0000\u0485\u0486\u0006\u0097\r\u0000\u0486\u013b\u0001\u0000\u0000"+ + "\u0000\u0487\u0488\u0005:\u0000\u0000\u0488\u013d\u0001\u0000\u0000\u0000"+ + "\u0489\u048f\u0003N!\u0000\u048a\u048f\u0003D\u001c\u0000\u048b\u048f"+ + "\u0003j/\u0000\u048c\u048f\u0003F\u001d\u0000\u048d\u048f\u0003T$\u0000"+ + "\u048e\u0489\u0001\u0000\u0000\u0000\u048e\u048a\u0001\u0000\u0000\u0000"+ + "\u048e\u048b\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000"+ + "\u048e\u048d\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000"+ + "\u0490\u048e\u0001\u0000\u0000\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + "\u0491\u013f\u0001\u0000\u0000\u0000\u0492\u0493\u00032\u0013\u0000\u0493"+ + "\u0494\u0001\u0000\u0000\u0000\u0494\u0495\u0006\u009a\t\u0000\u0495\u0141"+ + "\u0001\u0000\u0000\u0000\u0496\u0497\u00034\u0014\u0000\u0497\u0498\u0001"+ + 
"\u0000\u0000\u0000\u0498\u0499\u0006\u009b\t\u0000\u0499\u0143\u0001\u0000"+ + "\u0000\u0000\u049a\u049b\u00036\u0015\u0000\u049b\u049c\u0001\u0000\u0000"+ + "\u0000\u049c\u049d\u0006\u009c\t\u0000\u049d\u0145\u0001\u0000\u0000\u0000"+ + ":\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01df\u01e9"+ + "\u01ed\u01f0\u01f9\u01fb\u0206\u022f\u0234\u023d\u0244\u0249\u024b\u0256"+ + "\u025e\u0261\u0263\u0268\u026d\u0273\u027a\u027f\u0285\u0288\u0290\u0294"+ + "\u0316\u031b\u0320\u0322\u0328\u035b\u0360\u0383\u0387\u038c\u0391\u0396"+ + "\u0398\u039c\u039e\u03eb\u03ef\u03f4\u048e\u0490\u0019\u0005\u0002\u0000"+ + "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ + "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ + "\u0000\u0007@\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ + "\u0007A\u0000\u0007\"\u0000\u0007!\u0000\u0007C\u0000\u0007$\u0000\u0007"+ + "L\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007V\u0000\u0007U\u0000"+ + "\u0007B\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a75449a305d3f..b8c5f609e75e5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -10,6 +10,7 @@ null 'inlinestats' 'keep' 'limit' +'meta' 'mv_expand' 'rename' 'row' @@ -95,6 +96,9 @@ null null null 'info' +null +null +null 'functions' null null @@ -117,6 +121,7 @@ GROK INLINESTATS KEEP LIMIT +META MV_EXPAND RENAME ROW @@ -202,10 +207,13 @@ MVEXPAND_LINE_COMMENT MVEXPAND_MULTILINE_COMMENT MVEXPAND_WS INFO -FUNCTIONS SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS +FUNCTIONS +META_LINE_COMMENT 
+META_MULTILINE_COMMENT +META_WS COLON SETTING SETTING_LINE_COMMENT @@ -261,9 +269,10 @@ comparisonOperator explainCommand subqueryExpression showCommand +metaCommand enrichCommand enrichWithClause atn: -[4, 1, 104, 507, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 110, 8, 1, 10, 1, 12, 1, 113, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 119, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 134, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 146, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 153, 8, 5, 10, 5, 12, 5, 156, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 163, 8, 5, 1, 5, 1, 5, 3, 5, 167, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 175, 8, 5, 10, 5, 12, 5, 178, 9, 5, 1, 6, 1, 6, 3, 6, 182, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 201, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 207, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 215, 8, 8, 10, 8, 12, 8, 218, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 227, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 235, 8, 10, 10, 10, 12, 10, 238, 9, 10, 3, 10, 240, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 250, 8, 12, 10, 12, 12, 12, 253, 9, 12, 1, 13, 1, 13, 1, 
13, 1, 13, 1, 13, 3, 13, 260, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 266, 8, 14, 10, 14, 12, 14, 269, 9, 14, 1, 14, 3, 14, 272, 8, 14, 1, 15, 1, 15, 3, 15, 276, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 282, 8, 16, 10, 16, 12, 16, 285, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 296, 8, 19, 1, 19, 1, 19, 3, 19, 300, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 306, 8, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 5, 22, 313, 8, 22, 10, 22, 12, 22, 316, 9, 22, 1, 23, 1, 23, 1, 23, 5, 23, 321, 8, 23, 10, 23, 12, 23, 324, 9, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 343, 8, 26, 10, 26, 12, 26, 346, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 354, 8, 26, 10, 26, 12, 26, 357, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 365, 8, 26, 10, 26, 12, 26, 368, 9, 26, 1, 26, 1, 26, 3, 26, 372, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 381, 8, 28, 10, 28, 12, 28, 384, 9, 28, 1, 29, 1, 29, 3, 29, 388, 8, 29, 1, 29, 1, 29, 3, 29, 392, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 398, 8, 30, 10, 30, 12, 30, 401, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 407, 8, 31, 10, 31, 12, 31, 410, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 416, 8, 32, 10, 32, 12, 32, 419, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 429, 8, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 5, 37, 441, 8, 37, 10, 37, 12, 37, 444, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 3, 40, 454, 8, 40, 1, 41, 3, 41, 457, 8, 41, 1, 41, 1, 41, 1, 42, 3, 42, 462, 8, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 481, 8, 47, 1, 48, 1, 48, 1, 48, 1, 48, 3, 48, 487, 8, 48, 1, 48, 1, 48, 1, 48, 1, 48, 5, 48, 493, 8, 48, 10, 48, 12, 48, 496, 9, 48, 3, 48, 498, 8, 48, 1, 49, 1, 49, 1, 49, 3, 49, 503, 8, 49, 1, 49, 1, 49, 1, 49, 0, 
3, 2, 10, 16, 50, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 0, 8, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 31, 31, 34, 34, 1, 0, 37, 38, 2, 0, 36, 36, 50, 50, 2, 0, 51, 51, 53, 57, 533, 0, 100, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 4, 118, 1, 0, 0, 0, 6, 133, 1, 0, 0, 0, 8, 135, 1, 0, 0, 0, 10, 166, 1, 0, 0, 0, 12, 193, 1, 0, 0, 0, 14, 200, 1, 0, 0, 0, 16, 206, 1, 0, 0, 0, 18, 226, 1, 0, 0, 0, 20, 228, 1, 0, 0, 0, 22, 243, 1, 0, 0, 0, 24, 246, 1, 0, 0, 0, 26, 259, 1, 0, 0, 0, 28, 261, 1, 0, 0, 0, 30, 275, 1, 0, 0, 0, 32, 277, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 290, 1, 0, 0, 0, 38, 293, 1, 0, 0, 0, 40, 301, 1, 0, 0, 0, 42, 307, 1, 0, 0, 0, 44, 309, 1, 0, 0, 0, 46, 317, 1, 0, 0, 0, 48, 325, 1, 0, 0, 0, 50, 327, 1, 0, 0, 0, 52, 371, 1, 0, 0, 0, 54, 373, 1, 0, 0, 0, 56, 376, 1, 0, 0, 0, 58, 385, 1, 0, 0, 0, 60, 393, 1, 0, 0, 0, 62, 402, 1, 0, 0, 0, 64, 411, 1, 0, 0, 0, 66, 420, 1, 0, 0, 0, 68, 424, 1, 0, 0, 0, 70, 430, 1, 0, 0, 0, 72, 434, 1, 0, 0, 0, 74, 437, 1, 0, 0, 0, 76, 445, 1, 0, 0, 0, 78, 449, 1, 0, 0, 0, 80, 453, 1, 0, 0, 0, 82, 456, 1, 0, 0, 0, 84, 461, 1, 0, 0, 0, 86, 465, 1, 0, 0, 0, 88, 467, 1, 0, 0, 0, 90, 469, 1, 0, 0, 0, 92, 472, 1, 0, 0, 0, 94, 480, 1, 0, 0, 0, 96, 482, 1, 0, 0, 0, 98, 502, 1, 0, 0, 0, 100, 101, 3, 2, 1, 0, 101, 102, 5, 0, 0, 1, 102, 1, 1, 0, 0, 0, 103, 104, 6, 1, -1, 0, 104, 105, 3, 4, 2, 0, 105, 111, 1, 0, 0, 0, 106, 107, 10, 1, 0, 0, 107, 108, 5, 25, 0, 0, 108, 110, 3, 6, 3, 0, 109, 106, 1, 0, 0, 0, 110, 113, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112, 1, 0, 0, 0, 112, 3, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 114, 119, 3, 90, 45, 0, 115, 119, 3, 28, 14, 0, 116, 119, 3, 22, 11, 0, 117, 119, 3, 94, 47, 0, 118, 114, 1, 0, 0, 0, 118, 115, 1, 0, 0, 0, 118, 116, 1, 0, 0, 0, 118, 117, 1, 0, 0, 0, 119, 5, 1, 0, 0, 0, 120, 134, 3, 36, 18, 0, 121, 134, 3, 40, 20, 0, 122, 
134, 3, 54, 27, 0, 123, 134, 3, 60, 30, 0, 124, 134, 3, 56, 28, 0, 125, 134, 3, 38, 19, 0, 126, 134, 3, 8, 4, 0, 127, 134, 3, 62, 31, 0, 128, 134, 3, 64, 32, 0, 129, 134, 3, 68, 34, 0, 130, 134, 3, 70, 35, 0, 131, 134, 3, 96, 48, 0, 132, 134, 3, 72, 36, 0, 133, 120, 1, 0, 0, 0, 133, 121, 1, 0, 0, 0, 133, 122, 1, 0, 0, 0, 133, 123, 1, 0, 0, 0, 133, 124, 1, 0, 0, 0, 133, 125, 1, 0, 0, 0, 133, 126, 1, 0, 0, 0, 133, 127, 1, 0, 0, 0, 133, 128, 1, 0, 0, 0, 133, 129, 1, 0, 0, 0, 133, 130, 1, 0, 0, 0, 133, 131, 1, 0, 0, 0, 133, 132, 1, 0, 0, 0, 134, 7, 1, 0, 0, 0, 135, 136, 5, 17, 0, 0, 136, 137, 3, 10, 5, 0, 137, 9, 1, 0, 0, 0, 138, 139, 6, 5, -1, 0, 139, 140, 5, 43, 0, 0, 140, 167, 3, 10, 5, 7, 141, 167, 3, 14, 7, 0, 142, 167, 3, 12, 6, 0, 143, 145, 3, 14, 7, 0, 144, 146, 5, 43, 0, 0, 145, 144, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 148, 5, 40, 0, 0, 148, 149, 5, 39, 0, 0, 149, 154, 3, 14, 7, 0, 150, 151, 5, 33, 0, 0, 151, 153, 3, 14, 7, 0, 152, 150, 1, 0, 0, 0, 153, 156, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 157, 1, 0, 0, 0, 156, 154, 1, 0, 0, 0, 157, 158, 5, 49, 0, 0, 158, 167, 1, 0, 0, 0, 159, 160, 3, 14, 7, 0, 160, 162, 5, 41, 0, 0, 161, 163, 5, 43, 0, 0, 162, 161, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 165, 5, 44, 0, 0, 165, 167, 1, 0, 0, 0, 166, 138, 1, 0, 0, 0, 166, 141, 1, 0, 0, 0, 166, 142, 1, 0, 0, 0, 166, 143, 1, 0, 0, 0, 166, 159, 1, 0, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 10, 4, 0, 0, 169, 170, 5, 30, 0, 0, 170, 175, 3, 10, 5, 5, 171, 172, 10, 3, 0, 0, 172, 173, 5, 46, 0, 0, 173, 175, 3, 10, 5, 4, 174, 168, 1, 0, 0, 0, 174, 171, 1, 0, 0, 0, 175, 178, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 11, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 179, 181, 3, 14, 7, 0, 180, 182, 5, 43, 0, 0, 181, 180, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 5, 42, 0, 0, 184, 185, 3, 86, 43, 0, 185, 194, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 43, 0, 0, 188, 187, 1, 0, 0, 0, 
188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 48, 0, 0, 191, 192, 3, 86, 43, 0, 192, 194, 1, 0, 0, 0, 193, 179, 1, 0, 0, 0, 193, 186, 1, 0, 0, 0, 194, 13, 1, 0, 0, 0, 195, 201, 3, 16, 8, 0, 196, 197, 3, 16, 8, 0, 197, 198, 3, 88, 44, 0, 198, 199, 3, 16, 8, 0, 199, 201, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 200, 196, 1, 0, 0, 0, 201, 15, 1, 0, 0, 0, 202, 203, 6, 8, -1, 0, 203, 207, 3, 18, 9, 0, 204, 205, 7, 0, 0, 0, 205, 207, 3, 16, 8, 3, 206, 202, 1, 0, 0, 0, 206, 204, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 2, 0, 0, 209, 210, 7, 1, 0, 0, 210, 215, 3, 16, 8, 3, 211, 212, 10, 1, 0, 0, 212, 213, 7, 0, 0, 0, 213, 215, 3, 16, 8, 2, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 17, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 227, 3, 52, 26, 0, 220, 227, 3, 44, 22, 0, 221, 227, 3, 20, 10, 0, 222, 223, 5, 39, 0, 0, 223, 224, 3, 10, 5, 0, 224, 225, 5, 49, 0, 0, 225, 227, 1, 0, 0, 0, 226, 219, 1, 0, 0, 0, 226, 220, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 226, 222, 1, 0, 0, 0, 227, 19, 1, 0, 0, 0, 228, 229, 3, 48, 24, 0, 229, 239, 5, 39, 0, 0, 230, 240, 5, 60, 0, 0, 231, 236, 3, 10, 5, 0, 232, 233, 5, 33, 0, 0, 233, 235, 3, 10, 5, 0, 234, 232, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 230, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, 242, 5, 49, 0, 0, 242, 21, 1, 0, 0, 0, 243, 244, 5, 13, 0, 0, 244, 245, 3, 24, 12, 0, 245, 23, 1, 0, 0, 0, 246, 251, 3, 26, 13, 0, 247, 248, 5, 33, 0, 0, 248, 250, 3, 26, 13, 0, 249, 247, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 25, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 260, 3, 10, 5, 0, 255, 256, 3, 44, 22, 0, 256, 257, 5, 32, 0, 0, 257, 258, 3, 10, 5, 0, 258, 260, 1, 0, 0, 0, 259, 254, 1, 0, 0, 0, 259, 255, 1, 0, 0, 0, 260, 27, 1, 0, 0, 0, 261, 262, 5, 6, 0, 0, 262, 267, 3, 42, 21, 0, 263, 264, 5, 33, 0, 0, 
264, 266, 3, 42, 21, 0, 265, 263, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 270, 272, 3, 30, 15, 0, 271, 270, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 29, 1, 0, 0, 0, 273, 276, 3, 32, 16, 0, 274, 276, 3, 34, 17, 0, 275, 273, 1, 0, 0, 0, 275, 274, 1, 0, 0, 0, 276, 31, 1, 0, 0, 0, 277, 278, 5, 70, 0, 0, 278, 283, 3, 42, 21, 0, 279, 280, 5, 33, 0, 0, 280, 282, 3, 42, 21, 0, 281, 279, 1, 0, 0, 0, 282, 285, 1, 0, 0, 0, 283, 281, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 33, 1, 0, 0, 0, 285, 283, 1, 0, 0, 0, 286, 287, 5, 63, 0, 0, 287, 288, 3, 32, 16, 0, 288, 289, 5, 64, 0, 0, 289, 35, 1, 0, 0, 0, 290, 291, 5, 4, 0, 0, 291, 292, 3, 24, 12, 0, 292, 37, 1, 0, 0, 0, 293, 295, 5, 16, 0, 0, 294, 296, 3, 24, 12, 0, 295, 294, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 298, 5, 29, 0, 0, 298, 300, 3, 24, 12, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 39, 1, 0, 0, 0, 301, 302, 5, 8, 0, 0, 302, 305, 3, 24, 12, 0, 303, 304, 5, 29, 0, 0, 304, 306, 3, 24, 12, 0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 41, 1, 0, 0, 0, 307, 308, 7, 2, 0, 0, 308, 43, 1, 0, 0, 0, 309, 314, 3, 48, 24, 0, 310, 311, 5, 35, 0, 0, 311, 313, 3, 48, 24, 0, 312, 310, 1, 0, 0, 0, 313, 316, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 45, 1, 0, 0, 0, 316, 314, 1, 0, 0, 0, 317, 322, 3, 50, 25, 0, 318, 319, 5, 35, 0, 0, 319, 321, 3, 50, 25, 0, 320, 318, 1, 0, 0, 0, 321, 324, 1, 0, 0, 0, 322, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 47, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 325, 326, 7, 3, 0, 0, 326, 49, 1, 0, 0, 0, 327, 328, 5, 75, 0, 0, 328, 51, 1, 0, 0, 0, 329, 372, 5, 44, 0, 0, 330, 331, 3, 84, 42, 0, 331, 332, 5, 65, 0, 0, 332, 372, 1, 0, 0, 0, 333, 372, 3, 82, 41, 0, 334, 372, 3, 84, 42, 0, 335, 372, 3, 78, 39, 0, 336, 372, 5, 47, 0, 0, 337, 372, 3, 86, 43, 0, 338, 339, 5, 63, 0, 0, 339, 344, 3, 80, 40, 0, 340, 341, 5, 33, 0, 0, 341, 343, 3, 80, 40, 0, 342, 340, 1, 0, 0, 0, 343, 346, 1, 
0, 0, 0, 344, 342, 1, 0, 0, 0, 344, 345, 1, 0, 0, 0, 345, 347, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 347, 348, 5, 64, 0, 0, 348, 372, 1, 0, 0, 0, 349, 350, 5, 63, 0, 0, 350, 355, 3, 78, 39, 0, 351, 352, 5, 33, 0, 0, 352, 354, 3, 78, 39, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 358, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 359, 5, 64, 0, 0, 359, 372, 1, 0, 0, 0, 360, 361, 5, 63, 0, 0, 361, 366, 3, 86, 43, 0, 362, 363, 5, 33, 0, 0, 363, 365, 3, 86, 43, 0, 364, 362, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 366, 367, 1, 0, 0, 0, 367, 369, 1, 0, 0, 0, 368, 366, 1, 0, 0, 0, 369, 370, 5, 64, 0, 0, 370, 372, 1, 0, 0, 0, 371, 329, 1, 0, 0, 0, 371, 330, 1, 0, 0, 0, 371, 333, 1, 0, 0, 0, 371, 334, 1, 0, 0, 0, 371, 335, 1, 0, 0, 0, 371, 336, 1, 0, 0, 0, 371, 337, 1, 0, 0, 0, 371, 338, 1, 0, 0, 0, 371, 349, 1, 0, 0, 0, 371, 360, 1, 0, 0, 0, 372, 53, 1, 0, 0, 0, 373, 374, 5, 10, 0, 0, 374, 375, 5, 27, 0, 0, 375, 55, 1, 0, 0, 0, 376, 377, 5, 15, 0, 0, 377, 382, 3, 58, 29, 0, 378, 379, 5, 33, 0, 0, 379, 381, 3, 58, 29, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 57, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 3, 10, 5, 0, 386, 388, 7, 4, 0, 0, 387, 386, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 390, 5, 45, 0, 0, 390, 392, 7, 5, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 59, 1, 0, 0, 0, 393, 394, 5, 9, 0, 0, 394, 399, 3, 46, 23, 0, 395, 396, 5, 33, 0, 0, 396, 398, 3, 46, 23, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 61, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 2, 0, 0, 403, 408, 3, 46, 23, 0, 404, 405, 5, 33, 0, 0, 405, 407, 3, 46, 23, 0, 406, 404, 1, 0, 0, 0, 407, 410, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 408, 409, 1, 0, 0, 0, 409, 63, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 411, 412, 5, 12, 0, 0, 412, 417, 3, 66, 33, 0, 413, 414, 5, 33, 0, 0, 414, 416, 3, 66, 33, 0, 415, 413, 1, 0, 0, 
0, 416, 419, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 417, 418, 1, 0, 0, 0, 418, 65, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 420, 421, 3, 46, 23, 0, 421, 422, 5, 79, 0, 0, 422, 423, 3, 46, 23, 0, 423, 67, 1, 0, 0, 0, 424, 425, 5, 1, 0, 0, 425, 426, 3, 18, 9, 0, 426, 428, 3, 86, 43, 0, 427, 429, 3, 74, 37, 0, 428, 427, 1, 0, 0, 0, 428, 429, 1, 0, 0, 0, 429, 69, 1, 0, 0, 0, 430, 431, 5, 7, 0, 0, 431, 432, 3, 18, 9, 0, 432, 433, 3, 86, 43, 0, 433, 71, 1, 0, 0, 0, 434, 435, 5, 11, 0, 0, 435, 436, 3, 44, 22, 0, 436, 73, 1, 0, 0, 0, 437, 442, 3, 76, 38, 0, 438, 439, 5, 33, 0, 0, 439, 441, 3, 76, 38, 0, 440, 438, 1, 0, 0, 0, 441, 444, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 75, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 445, 446, 3, 48, 24, 0, 446, 447, 5, 32, 0, 0, 447, 448, 3, 52, 26, 0, 448, 77, 1, 0, 0, 0, 449, 450, 7, 6, 0, 0, 450, 79, 1, 0, 0, 0, 451, 454, 3, 82, 41, 0, 452, 454, 3, 84, 42, 0, 453, 451, 1, 0, 0, 0, 453, 452, 1, 0, 0, 0, 454, 81, 1, 0, 0, 0, 455, 457, 7, 0, 0, 0, 456, 455, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 5, 28, 0, 0, 459, 83, 1, 0, 0, 0, 460, 462, 7, 0, 0, 0, 461, 460, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 5, 27, 0, 0, 464, 85, 1, 0, 0, 0, 465, 466, 5, 26, 0, 0, 466, 87, 1, 0, 0, 0, 467, 468, 7, 7, 0, 0, 468, 89, 1, 0, 0, 0, 469, 470, 5, 5, 0, 0, 470, 471, 3, 92, 46, 0, 471, 91, 1, 0, 0, 0, 472, 473, 5, 63, 0, 0, 473, 474, 3, 2, 1, 0, 474, 475, 5, 64, 0, 0, 475, 93, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 481, 5, 95, 0, 0, 478, 479, 5, 14, 0, 0, 479, 481, 5, 96, 0, 0, 480, 476, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 95, 1, 0, 0, 0, 482, 483, 5, 3, 0, 0, 483, 486, 5, 85, 0, 0, 484, 485, 5, 83, 0, 0, 485, 487, 3, 46, 23, 0, 486, 484, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 497, 1, 0, 0, 0, 488, 489, 5, 84, 0, 0, 489, 494, 3, 98, 49, 0, 490, 491, 5, 33, 0, 0, 491, 493, 3, 98, 49, 0, 492, 490, 1, 0, 0, 0, 493, 496, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 498, 1, 0, 0, 0, 
496, 494, 1, 0, 0, 0, 497, 488, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 97, 1, 0, 0, 0, 499, 500, 3, 46, 23, 0, 500, 501, 5, 32, 0, 0, 501, 503, 1, 0, 0, 0, 502, 499, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 3, 46, 23, 0, 505, 99, 1, 0, 0, 0, 50, 111, 118, 133, 145, 154, 162, 166, 174, 176, 181, 188, 193, 200, 206, 214, 216, 226, 236, 239, 251, 259, 267, 271, 275, 283, 295, 299, 305, 314, 322, 344, 355, 366, 371, 382, 387, 391, 399, 408, 417, 428, 442, 453, 456, 461, 480, 486, 494, 497, 502] \ No newline at end of file +[4, 1, 108, 510, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 112, 8, 1, 10, 1, 12, 1, 115, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 122, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 137, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 149, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 156, 8, 5, 10, 5, 12, 5, 159, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 166, 8, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 6, 1, 6, 3, 6, 185, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 197, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 204, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 210, 8, 
8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 218, 8, 8, 10, 8, 12, 8, 221, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 230, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 238, 8, 10, 10, 10, 12, 10, 241, 9, 10, 3, 10, 243, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 253, 8, 12, 10, 12, 12, 12, 256, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 263, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 269, 8, 14, 10, 14, 12, 14, 272, 9, 14, 1, 14, 3, 14, 275, 8, 14, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 285, 8, 16, 10, 16, 12, 16, 288, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 299, 8, 19, 1, 19, 1, 19, 3, 19, 303, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 309, 8, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 5, 22, 316, 8, 22, 10, 22, 12, 22, 319, 9, 22, 1, 23, 1, 23, 1, 23, 5, 23, 324, 8, 23, 10, 23, 12, 23, 327, 9, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 346, 8, 26, 10, 26, 12, 26, 349, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 384, 8, 28, 10, 28, 12, 28, 387, 9, 28, 1, 29, 1, 29, 3, 29, 391, 8, 29, 1, 29, 1, 29, 3, 29, 395, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 410, 8, 31, 10, 31, 12, 31, 413, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 419, 8, 32, 10, 32, 12, 32, 422, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 432, 8, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 5, 37, 444, 8, 37, 10, 37, 12, 37, 447, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 3, 40, 457, 8, 40, 1, 41, 3, 41, 460, 8, 41, 1, 41, 1, 41, 1, 
42, 3, 42, 465, 8, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 490, 8, 49, 1, 49, 1, 49, 1, 49, 1, 49, 5, 49, 496, 8, 49, 10, 49, 12, 49, 499, 9, 49, 3, 49, 501, 8, 49, 1, 50, 1, 50, 1, 50, 3, 50, 506, 8, 50, 1, 50, 1, 50, 1, 50, 0, 3, 2, 10, 16, 51, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 72, 72, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 535, 0, 102, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 4, 121, 1, 0, 0, 0, 6, 136, 1, 0, 0, 0, 8, 138, 1, 0, 0, 0, 10, 169, 1, 0, 0, 0, 12, 196, 1, 0, 0, 0, 14, 203, 1, 0, 0, 0, 16, 209, 1, 0, 0, 0, 18, 229, 1, 0, 0, 0, 20, 231, 1, 0, 0, 0, 22, 246, 1, 0, 0, 0, 24, 249, 1, 0, 0, 0, 26, 262, 1, 0, 0, 0, 28, 264, 1, 0, 0, 0, 30, 278, 1, 0, 0, 0, 32, 280, 1, 0, 0, 0, 34, 289, 1, 0, 0, 0, 36, 293, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 312, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 330, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 379, 1, 0, 0, 0, 58, 388, 1, 0, 0, 0, 60, 396, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 414, 1, 0, 0, 0, 66, 423, 1, 0, 0, 0, 68, 427, 1, 0, 0, 0, 70, 433, 1, 0, 0, 0, 72, 437, 1, 0, 0, 0, 74, 440, 1, 0, 0, 0, 76, 448, 1, 0, 0, 0, 78, 452, 1, 0, 0, 0, 80, 456, 1, 0, 0, 0, 82, 459, 1, 0, 0, 0, 84, 464, 1, 0, 0, 0, 86, 468, 1, 0, 0, 0, 88, 470, 1, 0, 0, 0, 90, 472, 1, 0, 0, 0, 92, 475, 1, 0, 0, 0, 94, 479, 1, 0, 0, 0, 96, 482, 1, 0, 0, 0, 98, 485, 1, 0, 0, 0, 100, 505, 1, 0, 0, 0, 102, 103, 3, 2, 1, 0, 103, 104, 5, 0, 0, 1, 104, 1, 1, 0, 0, 0, 105, 106, 6, 1, -1, 0, 106, 107, 3, 4, 2, 0, 107, 113, 1, 0, 0, 0, 108, 109, 10, 1, 0, 0, 109, 110, 5, 26, 0, 0, 110, 112, 3, 6, 3, 0, 
111, 108, 1, 0, 0, 0, 112, 115, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 3, 1, 0, 0, 0, 115, 113, 1, 0, 0, 0, 116, 122, 3, 90, 45, 0, 117, 122, 3, 28, 14, 0, 118, 122, 3, 22, 11, 0, 119, 122, 3, 94, 47, 0, 120, 122, 3, 96, 48, 0, 121, 116, 1, 0, 0, 0, 121, 117, 1, 0, 0, 0, 121, 118, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 120, 1, 0, 0, 0, 122, 5, 1, 0, 0, 0, 123, 137, 3, 36, 18, 0, 124, 137, 3, 40, 20, 0, 125, 137, 3, 54, 27, 0, 126, 137, 3, 60, 30, 0, 127, 137, 3, 56, 28, 0, 128, 137, 3, 38, 19, 0, 129, 137, 3, 8, 4, 0, 130, 137, 3, 62, 31, 0, 131, 137, 3, 64, 32, 0, 132, 137, 3, 68, 34, 0, 133, 137, 3, 70, 35, 0, 134, 137, 3, 98, 49, 0, 135, 137, 3, 72, 36, 0, 136, 123, 1, 0, 0, 0, 136, 124, 1, 0, 0, 0, 136, 125, 1, 0, 0, 0, 136, 126, 1, 0, 0, 0, 136, 127, 1, 0, 0, 0, 136, 128, 1, 0, 0, 0, 136, 129, 1, 0, 0, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 135, 1, 0, 0, 0, 137, 7, 1, 0, 0, 0, 138, 139, 5, 18, 0, 0, 139, 140, 3, 10, 5, 0, 140, 9, 1, 0, 0, 0, 141, 142, 6, 5, -1, 0, 142, 143, 5, 44, 0, 0, 143, 170, 3, 10, 5, 7, 144, 170, 3, 14, 7, 0, 145, 170, 3, 12, 6, 0, 146, 148, 3, 14, 7, 0, 147, 149, 5, 44, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 5, 41, 0, 0, 151, 152, 5, 40, 0, 0, 152, 157, 3, 14, 7, 0, 153, 154, 5, 34, 0, 0, 154, 156, 3, 14, 7, 0, 155, 153, 1, 0, 0, 0, 156, 159, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 1, 0, 0, 0, 159, 157, 1, 0, 0, 0, 160, 161, 5, 50, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 3, 14, 7, 0, 163, 165, 5, 42, 0, 0, 164, 166, 5, 44, 0, 0, 165, 164, 1, 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 168, 5, 45, 0, 0, 168, 170, 1, 0, 0, 0, 169, 141, 1, 0, 0, 0, 169, 144, 1, 0, 0, 0, 169, 145, 1, 0, 0, 0, 169, 146, 1, 0, 0, 0, 169, 162, 1, 0, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 10, 4, 0, 0, 172, 173, 5, 31, 0, 0, 173, 178, 3, 10, 5, 5, 174, 175, 10, 3, 0, 0, 175, 176, 5, 
47, 0, 0, 176, 178, 3, 10, 5, 4, 177, 171, 1, 0, 0, 0, 177, 174, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 11, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 184, 3, 14, 7, 0, 183, 185, 5, 44, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 43, 0, 0, 187, 188, 3, 86, 43, 0, 188, 197, 1, 0, 0, 0, 189, 191, 3, 14, 7, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 49, 0, 0, 194, 195, 3, 86, 43, 0, 195, 197, 1, 0, 0, 0, 196, 182, 1, 0, 0, 0, 196, 189, 1, 0, 0, 0, 197, 13, 1, 0, 0, 0, 198, 204, 3, 16, 8, 0, 199, 200, 3, 16, 8, 0, 200, 201, 3, 88, 44, 0, 201, 202, 3, 16, 8, 0, 202, 204, 1, 0, 0, 0, 203, 198, 1, 0, 0, 0, 203, 199, 1, 0, 0, 0, 204, 15, 1, 0, 0, 0, 205, 206, 6, 8, -1, 0, 206, 210, 3, 18, 9, 0, 207, 208, 7, 0, 0, 0, 208, 210, 3, 16, 8, 3, 209, 205, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 219, 1, 0, 0, 0, 211, 212, 10, 2, 0, 0, 212, 213, 7, 1, 0, 0, 213, 218, 3, 16, 8, 3, 214, 215, 10, 1, 0, 0, 215, 216, 7, 0, 0, 0, 216, 218, 3, 16, 8, 2, 217, 211, 1, 0, 0, 0, 217, 214, 1, 0, 0, 0, 218, 221, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 17, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 230, 3, 52, 26, 0, 223, 230, 3, 44, 22, 0, 224, 230, 3, 20, 10, 0, 225, 226, 5, 40, 0, 0, 226, 227, 3, 10, 5, 0, 227, 228, 5, 50, 0, 0, 228, 230, 1, 0, 0, 0, 229, 222, 1, 0, 0, 0, 229, 223, 1, 0, 0, 0, 229, 224, 1, 0, 0, 0, 229, 225, 1, 0, 0, 0, 230, 19, 1, 0, 0, 0, 231, 232, 3, 48, 24, 0, 232, 242, 5, 40, 0, 0, 233, 243, 5, 61, 0, 0, 234, 239, 3, 10, 5, 0, 235, 236, 5, 34, 0, 0, 236, 238, 3, 10, 5, 0, 237, 235, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 243, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 233, 1, 0, 0, 0, 242, 234, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 245, 5, 50, 0, 0, 245, 21, 1, 0, 0, 0, 246, 247, 5, 14, 0, 0, 247, 248, 3, 24, 12, 0, 248, 23, 1, 0, 0, 0, 249, 254, 3, 26, 
13, 0, 250, 251, 5, 34, 0, 0, 251, 253, 3, 26, 13, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 25, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 263, 3, 10, 5, 0, 258, 259, 3, 44, 22, 0, 259, 260, 5, 33, 0, 0, 260, 261, 3, 10, 5, 0, 261, 263, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 27, 1, 0, 0, 0, 264, 265, 5, 6, 0, 0, 265, 270, 3, 42, 21, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 42, 21, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 3, 30, 15, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 29, 1, 0, 0, 0, 276, 279, 3, 32, 16, 0, 277, 279, 3, 34, 17, 0, 278, 276, 1, 0, 0, 0, 278, 277, 1, 0, 0, 0, 279, 31, 1, 0, 0, 0, 280, 281, 5, 71, 0, 0, 281, 286, 3, 42, 21, 0, 282, 283, 5, 34, 0, 0, 283, 285, 3, 42, 21, 0, 284, 282, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 33, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 64, 0, 0, 290, 291, 3, 32, 16, 0, 291, 292, 5, 65, 0, 0, 292, 35, 1, 0, 0, 0, 293, 294, 5, 4, 0, 0, 294, 295, 3, 24, 12, 0, 295, 37, 1, 0, 0, 0, 296, 298, 5, 17, 0, 0, 297, 299, 3, 24, 12, 0, 298, 297, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 301, 5, 30, 0, 0, 301, 303, 3, 24, 12, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 39, 1, 0, 0, 0, 304, 305, 5, 8, 0, 0, 305, 308, 3, 24, 12, 0, 306, 307, 5, 30, 0, 0, 307, 309, 3, 24, 12, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 41, 1, 0, 0, 0, 310, 311, 7, 2, 0, 0, 311, 43, 1, 0, 0, 0, 312, 317, 3, 48, 24, 0, 313, 314, 5, 36, 0, 0, 314, 316, 3, 48, 24, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 45, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 325, 3, 50, 25, 0, 321, 322, 5, 36, 0, 0, 322, 324, 3, 50, 25, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 47, 1, 0, 0, 0, 327, 325, 1, 0, 
0, 0, 328, 329, 7, 3, 0, 0, 329, 49, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 51, 1, 0, 0, 0, 332, 375, 5, 45, 0, 0, 333, 334, 3, 84, 42, 0, 334, 335, 5, 66, 0, 0, 335, 375, 1, 0, 0, 0, 336, 375, 3, 82, 41, 0, 337, 375, 3, 84, 42, 0, 338, 375, 3, 78, 39, 0, 339, 375, 5, 48, 0, 0, 340, 375, 3, 86, 43, 0, 341, 342, 5, 64, 0, 0, 342, 347, 3, 80, 40, 0, 343, 344, 5, 34, 0, 0, 344, 346, 3, 80, 40, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 350, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 65, 0, 0, 351, 375, 1, 0, 0, 0, 352, 353, 5, 64, 0, 0, 353, 358, 3, 78, 39, 0, 354, 355, 5, 34, 0, 0, 355, 357, 3, 78, 39, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 361, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 5, 65, 0, 0, 362, 375, 1, 0, 0, 0, 363, 364, 5, 64, 0, 0, 364, 369, 3, 86, 43, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 86, 43, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 373, 5, 65, 0, 0, 373, 375, 1, 0, 0, 0, 374, 332, 1, 0, 0, 0, 374, 333, 1, 0, 0, 0, 374, 336, 1, 0, 0, 0, 374, 337, 1, 0, 0, 0, 374, 338, 1, 0, 0, 0, 374, 339, 1, 0, 0, 0, 374, 340, 1, 0, 0, 0, 374, 341, 1, 0, 0, 0, 374, 352, 1, 0, 0, 0, 374, 363, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 377, 5, 10, 0, 0, 377, 378, 5, 28, 0, 0, 378, 55, 1, 0, 0, 0, 379, 380, 5, 16, 0, 0, 380, 385, 3, 58, 29, 0, 381, 382, 5, 34, 0, 0, 382, 384, 3, 58, 29, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 57, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 390, 3, 10, 5, 0, 389, 391, 7, 4, 0, 0, 390, 389, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 393, 5, 46, 0, 0, 393, 395, 7, 5, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 397, 5, 9, 0, 0, 397, 402, 3, 46, 23, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 46, 23, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 
0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 2, 0, 0, 406, 411, 3, 46, 23, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 46, 23, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 63, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 5, 13, 0, 0, 415, 420, 3, 66, 33, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 66, 33, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 65, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 3, 46, 23, 0, 424, 425, 5, 80, 0, 0, 425, 426, 3, 46, 23, 0, 426, 67, 1, 0, 0, 0, 427, 428, 5, 1, 0, 0, 428, 429, 3, 18, 9, 0, 429, 431, 3, 86, 43, 0, 430, 432, 3, 74, 37, 0, 431, 430, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 69, 1, 0, 0, 0, 433, 434, 5, 7, 0, 0, 434, 435, 3, 18, 9, 0, 435, 436, 3, 86, 43, 0, 436, 71, 1, 0, 0, 0, 437, 438, 5, 12, 0, 0, 438, 439, 3, 44, 22, 0, 439, 73, 1, 0, 0, 0, 440, 445, 3, 76, 38, 0, 441, 442, 5, 34, 0, 0, 442, 444, 3, 76, 38, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 75, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 3, 48, 24, 0, 449, 450, 5, 33, 0, 0, 450, 451, 3, 52, 26, 0, 451, 77, 1, 0, 0, 0, 452, 453, 7, 6, 0, 0, 453, 79, 1, 0, 0, 0, 454, 457, 3, 82, 41, 0, 455, 457, 3, 84, 42, 0, 456, 454, 1, 0, 0, 0, 456, 455, 1, 0, 0, 0, 457, 81, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 29, 0, 0, 462, 83, 1, 0, 0, 0, 463, 465, 7, 0, 0, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 5, 28, 0, 0, 467, 85, 1, 0, 0, 0, 468, 469, 5, 27, 0, 0, 469, 87, 1, 0, 0, 0, 470, 471, 7, 7, 0, 0, 471, 89, 1, 0, 0, 0, 472, 473, 5, 5, 0, 0, 473, 474, 3, 92, 46, 0, 474, 91, 1, 0, 0, 0, 475, 476, 5, 64, 0, 0, 476, 477, 3, 2, 1, 0, 477, 478, 5, 65, 0, 0, 478, 93, 1, 0, 0, 0, 479, 480, 5, 15, 0, 0, 480, 481, 5, 96, 0, 0, 481, 95, 1, 0, 0, 0, 482, 483, 5, 11, 0, 0, 
483, 484, 5, 100, 0, 0, 484, 97, 1, 0, 0, 0, 485, 486, 5, 3, 0, 0, 486, 489, 5, 86, 0, 0, 487, 488, 5, 84, 0, 0, 488, 490, 3, 46, 23, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 500, 1, 0, 0, 0, 491, 492, 5, 85, 0, 0, 492, 497, 3, 100, 50, 0, 493, 494, 5, 34, 0, 0, 494, 496, 3, 100, 50, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 491, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 3, 46, 23, 0, 503, 504, 5, 33, 0, 0, 504, 506, 1, 0, 0, 0, 505, 502, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 3, 46, 23, 0, 508, 101, 1, 0, 0, 0, 49, 113, 121, 136, 148, 157, 165, 169, 177, 179, 184, 191, 196, 203, 209, 217, 219, 229, 239, 242, 254, 262, 270, 274, 278, 286, 298, 302, 308, 317, 325, 347, 358, 369, 374, 385, 390, 394, 402, 411, 420, 431, 445, 456, 459, 464, 489, 497, 500, 505] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e0e3a77c0ad6a..88eaf491ca9d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,25 +18,26 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21, - EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24, - PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, - ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38, - LP=39, IN=40, IS=41, LIKE=42, NOT=43, 
NULL=44, NULLS=45, OR=46, PARAM=47, - RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, - GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, - CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, - EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, - FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, ID_PATTERN=75, - PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, - AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, - ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, - ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, - ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, - MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, - SETTING_WS=104; + KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, + STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, + WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, + PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, + ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, + LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, + RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, + GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, + FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, + PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, + 
AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, + ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, + ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, + ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, + MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, + META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -53,7 +54,7 @@ public class EsqlBaseParser extends Parser { RULE_booleanValue = 39, RULE_numericValue = 40, RULE_decimalValue = 41, RULE_integerValue = 42, RULE_string = 43, RULE_comparisonOperator = 44, RULE_explainCommand = 45, RULE_subqueryExpression = 46, RULE_showCommand = 47, - RULE_enrichCommand = 48, RULE_enrichWithClause = 49; + RULE_metaCommand = 48, RULE_enrichCommand = 49, RULE_enrichWithClause = 50; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -66,7 +67,7 @@ private static String[] makeRuleNames() { "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "enrichCommand", "enrichWithClause" + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -74,25 +75,25 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { 
null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'", - "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, - null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", - "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", - "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", - "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", - "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, - null, null, "'metadata'", null, null, null, null, null, null, null, null, - "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, - null, null, null, null, null, "'info'", "'functions'", null, null, null, - "':'" + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", + "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", - "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "INLINESTATS", 
"KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", + "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", @@ -106,8 +107,9 @@ private static String[] makeSymbolicNames() { "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", + "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS" }; } @@ -195,9 +197,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(100); + setState(102); query(0); - setState(101); + setState(103); match(EOF); } } @@ -293,11 +295,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(104); + setState(106); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(111); + setState(113); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -308,16 +310,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - 
setState(106); + setState(108); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(107); + setState(109); match(PIPE); - setState(108); + setState(110); processingCommand(); } } } - setState(113); + setState(115); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -348,6 +350,9 @@ public RowCommandContext rowCommand() { public ShowCommandContext showCommand() { return getRuleContext(ShowCommandContext.class,0); } + public MetaCommandContext metaCommand() { + return getRuleContext(MetaCommandContext.class,0); + } @SuppressWarnings("this-escape") public SourceCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -372,37 +377,44 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(118); + setState(121); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(114); + setState(116); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(115); + setState(117); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(116); + setState(118); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(117); + setState(119); showCommand(); } break; + case META: + enterOuterAlt(_localctx, 5); + { + setState(120); + metaCommand(); + } + break; default: throw new NoViableAltException(this); } @@ -483,97 +495,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(133); + setState(136); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(120); + setState(123); 
evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(121); + setState(124); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(122); + setState(125); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(123); + setState(126); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(124); + setState(127); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(125); + setState(128); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(126); + setState(129); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(127); + setState(130); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(128); + setState(131); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(129); + setState(132); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(130); + setState(133); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(131); + setState(134); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(132); + setState(135); mvExpandCommand(); } break; @@ -624,9 +636,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(135); + setState(138); match(WHERE); - setState(136); + setState(139); booleanExpression(0); } } @@ -821,7 +833,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(166); + setState(169); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -830,9 +842,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(139); + setState(142); match(NOT); - setState(140); + 
setState(143); booleanExpression(7); } break; @@ -841,7 +853,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(141); + setState(144); valueExpression(); } break; @@ -850,7 +862,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(142); + setState(145); regexBooleanExpression(); } break; @@ -859,41 +871,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(143); + setState(146); valueExpression(); - setState(145); + setState(148); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(144); + setState(147); match(NOT); } } - setState(147); + setState(150); match(IN); - setState(148); + setState(151); match(LP); - setState(149); + setState(152); valueExpression(); - setState(154); + setState(157); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(150); + setState(153); match(COMMA); - setState(151); + setState(154); valueExpression(); } } - setState(156); + setState(159); _errHandler.sync(this); _la = _input.LA(1); } - setState(157); + setState(160); match(RP); } break; @@ -902,27 +914,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(159); + setState(162); valueExpression(); - setState(160); + setState(163); match(IS); - setState(162); + setState(165); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(161); + setState(164); match(NOT); } } - setState(164); + setState(167); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(176); + setState(179); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -930,7 +942,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(174); + setState(177); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -938,11 +950,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(168); + setState(171); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(169); + setState(172); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(170); + setState(173); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -951,18 +963,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(171); + setState(174); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(172); + setState(175); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(173); + setState(176); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(178); + setState(181); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1017,48 +1029,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, 
RULE_regexBooleanExpression); int _la; try { - setState(193); + setState(196); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(179); + setState(182); valueExpression(); - setState(181); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(180); + setState(183); match(NOT); } } - setState(183); + setState(186); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(184); + setState(187); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(186); + setState(189); valueExpression(); - setState(188); + setState(191); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(187); + setState(190); match(NOT); } } - setState(190); + setState(193); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(191); + setState(194); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1144,14 +1156,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(200); + setState(203); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(195); + setState(198); operatorExpression(0); } break; @@ -1159,11 +1171,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(196); + setState(199); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(197); + setState(200); comparisonOperator(); - setState(198); + setState(201); ((ComparisonContext)_localctx).right = operatorExpression(0); 
} break; @@ -1288,7 +1300,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(206); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1297,7 +1309,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(203); + setState(206); primaryExpression(); } break; @@ -1306,7 +1318,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(204); + setState(207); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1317,13 +1329,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(205); + setState(208); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(216); + setState(219); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1331,7 +1343,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(214); + setState(217); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1339,12 +1351,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(208); + setState(211); if (!(precpred(_ctx, 2))) throw new 
FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(209); + setState(212); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1352,7 +1364,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(210); + setState(213); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1361,9 +1373,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(211); + setState(214); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(212); + setState(215); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1374,14 +1386,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(213); + setState(216); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(218); + setState(221); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1503,14 +1515,14 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); try { - setState(226); + setState(229); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(219); + setState(222); constant(); } break; @@ -1518,7 +1530,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(220); + setState(223); qualifiedName(); } break; @@ -1526,7 +1538,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(221); + setState(224); functionExpression(); } break; @@ -1534,11 +1546,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(222); + setState(225); match(LP); - setState(223); + setState(226); booleanExpression(0); - setState(224); + setState(227); match(RP); } break; @@ -1600,16 +1612,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(228); + setState(231); identifier(); - setState(229); + setState(232); match(LP); - setState(239); + setState(242); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(230); + setState(233); match(ASTERISK); } break; @@ -1629,21 +1641,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(231); + setState(234); booleanExpression(0); - setState(236); + setState(239); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(232); + setState(235); match(COMMA); - setState(233); + setState(236); booleanExpression(0); } } - setState(238); + setState(241); _errHandler.sync(this); _la = _input.LA(1); } @@ -1655,7 +1667,7 @@ public final FunctionExpressionContext functionExpression() throws 
RecognitionEx default: break; } - setState(241); + setState(244); match(RP); } } @@ -1702,9 +1714,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(243); + setState(246); match(ROW); - setState(244); + setState(247); fields(); } } @@ -1758,23 +1770,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(246); + setState(249); field(); - setState(251); + setState(254); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(247); + setState(250); match(COMMA); - setState(248); + setState(251); field(); } } } - setState(253); + setState(256); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1824,24 +1836,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 26, RULE_field); try { - setState(259); + setState(262); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(254); + setState(257); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(255); + setState(258); qualifiedName(); - setState(256); + setState(259); match(ASSIGN); - setState(257); + setState(260); booleanExpression(0); } break; @@ -1901,34 +1913,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(261); + setState(264); match(FROM); - setState(262); + setState(265); fromIdentifier(); - setState(267); + setState(270); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(263); 
+ setState(266); match(COMMA); - setState(264); + setState(267); fromIdentifier(); } } } - setState(269); + setState(272); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(271); + setState(274); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(270); + setState(273); metadata(); } break; @@ -1978,20 +1990,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 30, RULE_metadata); try { - setState(275); + setState(278); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(273); + setState(276); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(274); + setState(277); deprecated_metadata(); } break; @@ -2050,25 +2062,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(277); + setState(280); match(METADATA); - setState(278); + setState(281); fromIdentifier(); - setState(283); + setState(286); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,24,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(279); + setState(282); match(COMMA); - setState(280); + setState(283); fromIdentifier(); } } } - setState(285); + setState(288); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,24,_ctx); } @@ -2117,11 +2129,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(286); + setState(289); match(OPENING_BRACKET); - setState(287); + setState(290); metadataOption(); - setState(288); + setState(291); match(CLOSING_BRACKET); } } @@ -2168,9 +2180,9 @@ public final EvalCommandContext evalCommand() throws 
RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(293); match(EVAL); - setState(291); + setState(294); fields(); } } @@ -2223,26 +2235,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(293); + setState(296); match(STATS); - setState(295); + setState(298); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(294); + setState(297); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(299); + setState(302); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(297); + setState(300); match(BY); - setState(298); + setState(301); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2298,18 +2310,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(301); + setState(304); match(INLINESTATS); - setState(302); - ((InlinestatsCommandContext)_localctx).stats = fields(); setState(305); + ((InlinestatsCommandContext)_localctx).stats = fields(); + setState(308); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(303); + setState(306); match(BY); - setState(304); + setState(307); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2358,7 +2370,7 @@ public final FromIdentifierContext fromIdentifier() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); _la = _input.LA(1); if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2420,23 +2432,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(309); + setState(312); identifier(); - setState(314); + setState(317); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(310); + setState(313); match(DOT); - setState(311); + setState(314); identifier(); } } } - setState(316); + setState(319); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2492,23 +2504,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(317); + setState(320); identifierPattern(); - setState(322); + setState(325); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(318); + setState(321); match(DOT); - setState(319); + setState(322); identifierPattern(); } } } - setState(324); + setState(327); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } @@ -2556,7 +2568,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(325); + setState(328); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2608,7 +2620,7 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce try { enterOuterAlt(_localctx, 1); { - setState(327); + setState(330); match(ID_PATTERN); } } @@ -2877,14 +2889,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 52, RULE_constant); int _la; try { - setState(371); + setState(374); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(329); + setState(332); match(NULL); } break; @@ -2892,9 +2904,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = 
new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(330); + setState(333); integerValue(); - setState(331); + setState(334); match(UNQUOTED_IDENTIFIER); } break; @@ -2902,7 +2914,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(333); + setState(336); decimalValue(); } break; @@ -2910,7 +2922,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(334); + setState(337); integerValue(); } break; @@ -2918,7 +2930,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(335); + setState(338); booleanValue(); } break; @@ -2926,7 +2938,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(336); + setState(339); match(PARAM); } break; @@ -2934,7 +2946,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(337); + setState(340); string(); } break; @@ -2942,27 +2954,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(338); + setState(341); match(OPENING_BRACKET); - setState(339); + setState(342); numericValue(); - setState(344); + setState(347); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(340); + setState(343); match(COMMA); - setState(341); + setState(344); numericValue(); } } - setState(346); + setState(349); _errHandler.sync(this); _la = _input.LA(1); } - setState(347); + setState(350); match(CLOSING_BRACKET); } break; @@ -2970,27 
+2982,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(349); + setState(352); match(OPENING_BRACKET); - setState(350); + setState(353); booleanValue(); - setState(355); + setState(358); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(351); + setState(354); match(COMMA); - setState(352); + setState(355); booleanValue(); } } - setState(357); + setState(360); _errHandler.sync(this); _la = _input.LA(1); } - setState(358); + setState(361); match(CLOSING_BRACKET); } break; @@ -2998,27 +3010,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(360); + setState(363); match(OPENING_BRACKET); - setState(361); + setState(364); string(); - setState(366); + setState(369); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(362); + setState(365); match(COMMA); - setState(363); + setState(366); string(); } } - setState(368); + setState(371); _errHandler.sync(this); _la = _input.LA(1); } - setState(369); + setState(372); match(CLOSING_BRACKET); } break; @@ -3065,9 +3077,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(373); + setState(376); match(LIMIT); - setState(374); + setState(377); match(INTEGER_LITERAL); } } @@ -3122,25 +3134,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(379); match(SORT); - setState(377); + setState(380); orderExpression(); - setState(382); + setState(385); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(381); 
match(COMMA); - setState(379); + setState(382); orderExpression(); } } } - setState(384); + setState(387); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3196,14 +3208,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(385); + setState(388); booleanExpression(0); - setState(387); + setState(390); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(386); + setState(389); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3217,14 +3229,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(391); + setState(394); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(389); + setState(392); match(NULLS); - setState(390); + setState(393); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3291,25 +3303,25 @@ public final KeepCommandContext keepCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(393); + setState(396); match(KEEP); - setState(394); + setState(397); qualifiedNamePattern(); - setState(399); + setState(402); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(395); + setState(398); match(COMMA); - setState(396); + setState(399); qualifiedNamePattern(); } } } - setState(401); + setState(404); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } @@ -3366,25 +3378,25 @@ public final DropCommandContext dropCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(402); + setState(405); match(DROP); - 
setState(403); + setState(406); qualifiedNamePattern(); - setState(408); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(404); + setState(407); match(COMMA); - setState(405); + setState(408); qualifiedNamePattern(); } } } - setState(410); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } @@ -3441,25 +3453,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(411); + setState(414); match(RENAME); - setState(412); + setState(415); renameClause(); - setState(417); + setState(420); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(413); + setState(416); match(COMMA); - setState(414); + setState(417); renameClause(); } } } - setState(419); + setState(422); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,39,_ctx); } @@ -3513,11 +3525,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(420); + setState(423); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(421); + setState(424); match(AS); - setState(422); + setState(425); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3570,18 +3582,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(424); + setState(427); match(DISSECT); - setState(425); + setState(428); primaryExpression(); - setState(426); + setState(429); string(); - setState(428); + setState(431); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - 
setState(427); + setState(430); commandOptions(); } break; @@ -3634,11 +3646,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(430); + setState(433); match(GROK); - setState(431); + setState(434); primaryExpression(); - setState(432); + setState(435); string(); } } @@ -3685,9 +3697,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(434); + setState(437); match(MV_EXPAND); - setState(435); + setState(438); qualifiedName(); } } @@ -3741,23 +3753,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(437); + setState(440); commandOption(); - setState(442); + setState(445); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(438); + setState(441); match(COMMA); - setState(439); + setState(442); commandOption(); } } } - setState(444); + setState(447); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } @@ -3809,11 +3821,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(445); + setState(448); identifier(); - setState(446); + setState(449); match(ASSIGN); - setState(447); + setState(450); constant(); } } @@ -3859,7 +3871,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(449); + setState(452); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3914,20 +3926,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 80, RULE_numericValue); try { 
- setState(453); + setState(456); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(451); + setState(454); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(452); + setState(455); integerValue(); } break; @@ -3976,12 +3988,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(459); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(455); + setState(458); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3994,7 +4006,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(458); + setState(461); match(DECIMAL_LITERAL); } } @@ -4041,12 +4053,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(461); + setState(464); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(460); + setState(463); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4059,7 +4071,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(463); + setState(466); match(INTEGER_LITERAL); } } @@ -4103,7 +4115,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(468); match(STRING); } } @@ -4153,9 +4165,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(467); + setState(470); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 281474976710656000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4208,9 +4220,9 @@ public 
final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(472); match(EXPLAIN); - setState(470); + setState(473); subqueryExpression(); } } @@ -4258,11 +4270,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(472); + setState(475); match(OPENING_BRACKET); - setState(473); + setState(476); query(0); - setState(474); + setState(477); match(CLOSING_BRACKET); } } @@ -4311,54 +4323,77 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + + public final ShowCommandContext showCommand() throws RecognitionException { + ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); + enterRule(_localctx, 94, RULE_showCommand); + try { + _localctx = new ShowInfoContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(479); + match(SHOW); + setState(480); + match(INFO); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") - public static class ShowFunctionsContext extends ShowCommandContext { - public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); } + public static class MetaCommandContext extends ParserRuleContext { + @SuppressWarnings("this-escape") + public MetaCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_metaCommand; } + + @SuppressWarnings("this-escape") + public MetaCommandContext() { } + public void copyFrom(MetaCommandContext ctx) { + super.copyFrom(ctx); + } + } + @SuppressWarnings("CheckReturnValue") + public static class MetaFunctionsContext extends MetaCommandContext { + public TerminalNode META() { return getToken(EsqlBaseParser.META, 0); } 
public TerminalNode FUNCTIONS() { return getToken(EsqlBaseParser.FUNCTIONS, 0); } @SuppressWarnings("this-escape") - public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); } + public MetaFunctionsContext(MetaCommandContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowFunctions(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetaFunctions(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitShowFunctions(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetaFunctions(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitShowFunctions(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetaFunctions(this); else return visitor.visitChildren(this); } } - public final ShowCommandContext showCommand() throws RecognitionException { - ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_showCommand); + public final MetaCommandContext metaCommand() throws RecognitionException { + MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); + enterRule(_localctx, 96, RULE_metaCommand); try { - setState(480); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { - case 1: - _localctx = new ShowInfoContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(476); - match(SHOW); - setState(477); - match(INFO); - } - break; - case 2: - _localctx = new ShowFunctionsContext(_localctx); - enterOuterAlt(_localctx, 2); - { - setState(478); - match(SHOW); - setState(479); - 
match(FUNCTIONS); - } - break; + _localctx = new MetaFunctionsContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(482); + match(META); + setState(483); + match(FUNCTIONS); } } catch (RecognitionException re) { @@ -4415,53 +4450,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_enrichCommand); + enterRule(_localctx, 98, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(482); + setState(485); match(ENRICH); - setState(483); - ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); setState(486); + ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); + setState(489); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(484); + setState(487); match(ON); - setState(485); + setState(488); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(497); + setState(500); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(488); + setState(491); match(WITH); - setState(489); + setState(492); enrichWithClause(); - setState(494); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(490); + setState(493); match(COMMA); - setState(491); + setState(494); enrichWithClause(); } } } - setState(496); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,46,_ctx); } } break; @@ -4512,23 +4547,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_enrichWithClause); + enterRule(_localctx, 100, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(502); + setState(505); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(499); + setState(502); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(500); + setState(503); match(ASSIGN); } break; } - setState(504); + setState(507); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4581,7 +4616,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001h\u01fb\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001l\u01fe\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4594,317 +4629,318 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - 
"\u0001\u0001\u0001\u0001\u0001\u0005\u0001n\b\u0001\n\u0001\f\u0001q\t"+ - "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002w\b"+ - "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ + "2\u00072\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001p\b\u0001\n\u0001"+ + "\f\u0001s\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002z\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0003\u0003\u0086\b\u0003\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0003\u0005\u0092\b\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u0099\b\u0005\n\u0005\f\u0005"+ - "\u009c\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0003\u0005\u00a3\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a7\b"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0005\u0005\u00af\b\u0005\n\u0005\f\u0005\u00b2\t\u0005\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00b6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0003\u0006\u00bd\b\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0003\u0007\u00c9\b\u0007\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0003\b\u00cf\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0005\b\u00d7\b\b\n\b\f\b\u00da\t\b\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0003\t\u00e3\b\t\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0005\n\u00eb\b\n\n\n\f\n\u00ee\t\n\u0003\n\u00f0\b"+ - 
"\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f"+ - "\u0001\f\u0005\f\u00fa\b\f\n\f\f\f\u00fd\t\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0003\r\u0104\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0005\u000e\u010a\b\u000e\n\u000e\f\u000e\u010d\t\u000e\u0001\u000e"+ - "\u0003\u000e\u0110\b\u000e\u0001\u000f\u0001\u000f\u0003\u000f\u0114\b"+ - "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u011a"+ - "\b\u0010\n\u0010\f\u0010\u011d\t\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0003\u0013\u0128\b\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u012c\b"+ - "\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0132"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0089\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u0095\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u009c"+ + "\b\u0005\n\u0005\f\u0005\u009f\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00a6\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00aa\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00b2\b\u0005\n\u0005\f\u0005\u00b5"+ + "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b9\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c0\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c5\b\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00cc\b\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00d2\b\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00da\b\b\n\b\f\b\u00dd\t\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00e6\b\t\u0001"+ + 
"\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00ee\b\n\n\n\f\n\u00f1"+ + "\t\n\u0003\n\u00f3\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0005\f\u00fd\b\f\n\f\f\f\u0100\t\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0003\r\u0107\b\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0005\u000e\u010d\b\u000e\n\u000e\f\u000e\u0110\t\u000e"+ + "\u0001\u000e\u0003\u000e\u0113\b\u000e\u0001\u000f\u0001\u000f\u0003\u000f"+ + "\u0117\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010"+ + "\u011d\b\u0010\n\u0010\f\u0010\u0120\t\u0010\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0003\u0013\u012b\b\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u012f"+ + "\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0135"+ "\b\u0014\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+ - "\u0016\u0139\b\u0016\n\u0016\f\u0016\u013c\t\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0005\u0017\u0141\b\u0017\n\u0017\f\u0017\u0144\t\u0017\u0001"+ + "\u0016\u013c\b\u0016\n\u0016\f\u0016\u013f\t\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0005\u0017\u0144\b\u0017\n\u0017\f\u0017\u0147\t\u0017\u0001"+ "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0157"+ - "\b\u001a\n\u001a\f\u001a\u015a\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0162\b\u001a\n\u001a"+ - "\f\u001a\u0165\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0005\u001a\u016d\b\u001a\n\u001a\f\u001a\u0170"+ - "\t\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0174\b\u001a\u0001\u001b"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u015a"+ + 
"\b\u001a\n\u001a\f\u001a\u015d\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0165\b\u001a\n\u001a"+ + "\f\u001a\u0168\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001a\u0001\u001a\u0005\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173"+ + "\t\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001b"+ "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u017d\b\u001c\n\u001c\f\u001c\u0180\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0003\u001d\u0184\b\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u0188"+ - "\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u018e"+ - "\b\u001e\n\u001e\f\u001e\u0191\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0005\u001f\u0197\b\u001f\n\u001f\f\u001f\u019a\t\u001f\u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01a0\b \n \f \u01a3\t \u0001!\u0001!\u0001"+ - "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0003\"\u01ad\b\"\u0001#\u0001"+ - "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0005%\u01b9"+ - "\b%\n%\f%\u01bc\t%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001("+ - "\u0001(\u0003(\u01c6\b(\u0001)\u0003)\u01c9\b)\u0001)\u0001)\u0001*\u0003"+ - "*\u01ce\b*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001"+ - "-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0003/\u01e1"+ - "\b/\u00010\u00010\u00010\u00010\u00030\u01e7\b0\u00010\u00010\u00010\u0001"+ - "0\u00050\u01ed\b0\n0\f0\u01f0\t0\u00030\u01f2\b0\u00011\u00011\u00011"+ - "\u00031\u01f7\b1\u00011\u00011\u00011\u0000\u0003\u0002\n\u00102\u0000"+ + "\u0005\u001c\u0180\b\u001c\n\u001c\f\u001c\u0183\t\u001c\u0001\u001d\u0001"+ + "\u001d\u0003\u001d\u0187\b\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u018b"+ + "\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0191"+ + "\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ + 
"\u0001\u001f\u0005\u001f\u019a\b\u001f\n\u001f\f\u001f\u019d\t\u001f\u0001"+ + " \u0001 \u0001 \u0001 \u0005 \u01a3\b \n \f \u01a6\t \u0001!\u0001!\u0001"+ + "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0003\"\u01b0\b\"\u0001#\u0001"+ + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0005%\u01bc"+ + "\b%\n%\f%\u01bf\t%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001("+ + "\u0001(\u0003(\u01c9\b(\u0001)\u0003)\u01cc\b)\u0001)\u0001)\u0001*\u0003"+ + "*\u01d1\b*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00011\u00011\u00011\u00011\u00031\u01ea\b1\u00011\u00011\u00011\u0001"+ + "1\u00051\u01f0\b1\n1\f1\u01f3\t1\u00031\u01f5\b1\u00012\u00012\u00012"+ + "\u00032\u01fa\b2\u00012\u00012\u00012\u0000\u0003\u0002\n\u00103\u0000"+ "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ - "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`b\u0000\b\u0001\u0000:;\u0001"+ - "\u0000<>\u0002\u0000BBGG\u0001\u0000AB\u0002\u0000\u001f\u001f\"\"\u0001"+ - "\u0000%&\u0002\u0000$$22\u0002\u00003359\u0215\u0000d\u0001\u0000\u0000"+ - "\u0000\u0002g\u0001\u0000\u0000\u0000\u0004v\u0001\u0000\u0000\u0000\u0006"+ - "\u0085\u0001\u0000\u0000\u0000\b\u0087\u0001\u0000\u0000\u0000\n\u00a6"+ - "\u0001\u0000\u0000\u0000\f\u00c1\u0001\u0000\u0000\u0000\u000e\u00c8\u0001"+ - "\u0000\u0000\u0000\u0010\u00ce\u0001\u0000\u0000\u0000\u0012\u00e2\u0001"+ - "\u0000\u0000\u0000\u0014\u00e4\u0001\u0000\u0000\u0000\u0016\u00f3\u0001"+ - "\u0000\u0000\u0000\u0018\u00f6\u0001\u0000\u0000\u0000\u001a\u0103\u0001"+ - "\u0000\u0000\u0000\u001c\u0105\u0001\u0000\u0000\u0000\u001e\u0113\u0001"+ - "\u0000\u0000\u0000 \u0115\u0001\u0000\u0000\u0000\"\u011e\u0001\u0000"+ - "\u0000\u0000$\u0122\u0001\u0000\u0000\u0000&\u0125\u0001\u0000\u0000\u0000"+ - "(\u012d\u0001\u0000\u0000\u0000*\u0133\u0001\u0000\u0000\u0000,\u0135"+ - 
"\u0001\u0000\u0000\u0000.\u013d\u0001\u0000\u0000\u00000\u0145\u0001\u0000"+ - "\u0000\u00002\u0147\u0001\u0000\u0000\u00004\u0173\u0001\u0000\u0000\u0000"+ - "6\u0175\u0001\u0000\u0000\u00008\u0178\u0001\u0000\u0000\u0000:\u0181"+ - "\u0001\u0000\u0000\u0000<\u0189\u0001\u0000\u0000\u0000>\u0192\u0001\u0000"+ - "\u0000\u0000@\u019b\u0001\u0000\u0000\u0000B\u01a4\u0001\u0000\u0000\u0000"+ - "D\u01a8\u0001\u0000\u0000\u0000F\u01ae\u0001\u0000\u0000\u0000H\u01b2"+ - "\u0001\u0000\u0000\u0000J\u01b5\u0001\u0000\u0000\u0000L\u01bd\u0001\u0000"+ - "\u0000\u0000N\u01c1\u0001\u0000\u0000\u0000P\u01c5\u0001\u0000\u0000\u0000"+ - "R\u01c8\u0001\u0000\u0000\u0000T\u01cd\u0001\u0000\u0000\u0000V\u01d1"+ - "\u0001\u0000\u0000\u0000X\u01d3\u0001\u0000\u0000\u0000Z\u01d5\u0001\u0000"+ - "\u0000\u0000\\\u01d8\u0001\u0000\u0000\u0000^\u01e0\u0001\u0000\u0000"+ - "\u0000`\u01e2\u0001\u0000\u0000\u0000b\u01f6\u0001\u0000\u0000\u0000d"+ - "e\u0003\u0002\u0001\u0000ef\u0005\u0000\u0000\u0001f\u0001\u0001\u0000"+ - "\u0000\u0000gh\u0006\u0001\uffff\uffff\u0000hi\u0003\u0004\u0002\u0000"+ - "io\u0001\u0000\u0000\u0000jk\n\u0001\u0000\u0000kl\u0005\u0019\u0000\u0000"+ - "ln\u0003\u0006\u0003\u0000mj\u0001\u0000\u0000\u0000nq\u0001\u0000\u0000"+ - "\u0000om\u0001\u0000\u0000\u0000op\u0001\u0000\u0000\u0000p\u0003\u0001"+ - "\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000rw\u0003Z-\u0000sw\u0003\u001c"+ - "\u000e\u0000tw\u0003\u0016\u000b\u0000uw\u0003^/\u0000vr\u0001\u0000\u0000"+ - "\u0000vs\u0001\u0000\u0000\u0000vt\u0001\u0000\u0000\u0000vu\u0001\u0000"+ - "\u0000\u0000w\u0005\u0001\u0000\u0000\u0000x\u0086\u0003$\u0012\u0000"+ - "y\u0086\u0003(\u0014\u0000z\u0086\u00036\u001b\u0000{\u0086\u0003<\u001e"+ - "\u0000|\u0086\u00038\u001c\u0000}\u0086\u0003&\u0013\u0000~\u0086\u0003"+ - "\b\u0004\u0000\u007f\u0086\u0003>\u001f\u0000\u0080\u0086\u0003@ \u0000"+ - "\u0081\u0086\u0003D\"\u0000\u0082\u0086\u0003F#\u0000\u0083\u0086\u0003"+ - 
"`0\u0000\u0084\u0086\u0003H$\u0000\u0085x\u0001\u0000\u0000\u0000\u0085"+ - "y\u0001\u0000\u0000\u0000\u0085z\u0001\u0000\u0000\u0000\u0085{\u0001"+ - "\u0000\u0000\u0000\u0085|\u0001\u0000\u0000\u0000\u0085}\u0001\u0000\u0000"+ - "\u0000\u0085~\u0001\u0000\u0000\u0000\u0085\u007f\u0001\u0000\u0000\u0000"+ - "\u0085\u0080\u0001\u0000\u0000\u0000\u0085\u0081\u0001\u0000\u0000\u0000"+ - "\u0085\u0082\u0001\u0000\u0000\u0000\u0085\u0083\u0001\u0000\u0000\u0000"+ - "\u0085\u0084\u0001\u0000\u0000\u0000\u0086\u0007\u0001\u0000\u0000\u0000"+ - "\u0087\u0088\u0005\u0011\u0000\u0000\u0088\u0089\u0003\n\u0005\u0000\u0089"+ - "\t\u0001\u0000\u0000\u0000\u008a\u008b\u0006\u0005\uffff\uffff\u0000\u008b"+ - "\u008c\u0005+\u0000\u0000\u008c\u00a7\u0003\n\u0005\u0007\u008d\u00a7"+ - "\u0003\u000e\u0007\u0000\u008e\u00a7\u0003\f\u0006\u0000\u008f\u0091\u0003"+ - "\u000e\u0007\u0000\u0090\u0092\u0005+\u0000\u0000\u0091\u0090\u0001\u0000"+ - "\u0000\u0000\u0091\u0092\u0001\u0000\u0000\u0000\u0092\u0093\u0001\u0000"+ - "\u0000\u0000\u0093\u0094\u0005(\u0000\u0000\u0094\u0095\u0005\'\u0000"+ - "\u0000\u0095\u009a\u0003\u000e\u0007\u0000\u0096\u0097\u0005!\u0000\u0000"+ - "\u0097\u0099\u0003\u000e\u0007\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ - "\u0099\u009c\u0001\u0000\u0000\u0000\u009a\u0098\u0001\u0000\u0000\u0000"+ - "\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009d\u0001\u0000\u0000\u0000"+ - "\u009c\u009a\u0001\u0000\u0000\u0000\u009d\u009e\u00051\u0000\u0000\u009e"+ - "\u00a7\u0001\u0000\u0000\u0000\u009f\u00a0\u0003\u000e\u0007\u0000\u00a0"+ - "\u00a2\u0005)\u0000\u0000\u00a1\u00a3\u0005+\u0000\u0000\u00a2\u00a1\u0001"+ - "\u0000\u0000\u0000\u00a2\u00a3\u0001\u0000\u0000\u0000\u00a3\u00a4\u0001"+ - "\u0000\u0000\u0000\u00a4\u00a5\u0005,\u0000\u0000\u00a5\u00a7\u0001\u0000"+ - "\u0000\u0000\u00a6\u008a\u0001\u0000\u0000\u0000\u00a6\u008d\u0001\u0000"+ - "\u0000\u0000\u00a6\u008e\u0001\u0000\u0000\u0000\u00a6\u008f\u0001\u0000"+ - 
"\u0000\u0000\u00a6\u009f\u0001\u0000\u0000\u0000\u00a7\u00b0\u0001\u0000"+ - "\u0000\u0000\u00a8\u00a9\n\u0004\u0000\u0000\u00a9\u00aa\u0005\u001e\u0000"+ - "\u0000\u00aa\u00af\u0003\n\u0005\u0005\u00ab\u00ac\n\u0003\u0000\u0000"+ - "\u00ac\u00ad\u0005.\u0000\u0000\u00ad\u00af\u0003\n\u0005\u0004\u00ae"+ - "\u00a8\u0001\u0000\u0000\u0000\u00ae\u00ab\u0001\u0000\u0000\u0000\u00af"+ - "\u00b2\u0001\u0000\u0000\u0000\u00b0\u00ae\u0001\u0000\u0000\u0000\u00b0"+ - "\u00b1\u0001\u0000\u0000\u0000\u00b1\u000b\u0001\u0000\u0000\u0000\u00b2"+ - "\u00b0\u0001\u0000\u0000\u0000\u00b3\u00b5\u0003\u000e\u0007\u0000\u00b4"+ - "\u00b6\u0005+\u0000\u0000\u00b5\u00b4\u0001\u0000\u0000\u0000\u00b5\u00b6"+ - "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0005*\u0000\u0000\u00b8\u00b9\u0003V+\u0000\u00b9\u00c2\u0001\u0000"+ - "\u0000\u0000\u00ba\u00bc\u0003\u000e\u0007\u0000\u00bb\u00bd\u0005+\u0000"+ - "\u0000\u00bc\u00bb\u0001\u0000\u0000\u0000\u00bc\u00bd\u0001\u0000\u0000"+ - "\u0000\u00bd\u00be\u0001\u0000\u0000\u0000\u00be\u00bf\u00050\u0000\u0000"+ - "\u00bf\u00c0\u0003V+\u0000\u00c0\u00c2\u0001\u0000\u0000\u0000\u00c1\u00b3"+ - "\u0001\u0000\u0000\u0000\u00c1\u00ba\u0001\u0000\u0000\u0000\u00c2\r\u0001"+ - "\u0000\u0000\u0000\u00c3\u00c9\u0003\u0010\b\u0000\u00c4\u00c5\u0003\u0010"+ - "\b\u0000\u00c5\u00c6\u0003X,\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7"+ - "\u00c9\u0001\u0000\u0000\u0000\u00c8\u00c3\u0001\u0000\u0000\u0000\u00c8"+ - "\u00c4\u0001\u0000\u0000\u0000\u00c9\u000f\u0001\u0000\u0000\u0000\u00ca"+ - "\u00cb\u0006\b\uffff\uffff\u0000\u00cb\u00cf\u0003\u0012\t\u0000\u00cc"+ - "\u00cd\u0007\u0000\u0000\u0000\u00cd\u00cf\u0003\u0010\b\u0003\u00ce\u00ca"+ - "\u0001\u0000\u0000\u0000\u00ce\u00cc\u0001\u0000\u0000\u0000\u00cf\u00d8"+ - "\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0002\u0000\u0000\u00d1\u00d2\u0007"+ - "\u0001\u0000\u0000\u00d2\u00d7\u0003\u0010\b\u0003\u00d3\u00d4\n\u0001"+ - 
"\u0000\u0000\u00d4\u00d5\u0007\u0000\u0000\u0000\u00d5\u00d7\u0003\u0010"+ - "\b\u0002\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000"+ - "\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u0011\u0001\u0000\u0000"+ - "\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00e3\u00034\u001a\u0000"+ - "\u00dc\u00e3\u0003,\u0016\u0000\u00dd\u00e3\u0003\u0014\n\u0000\u00de"+ - "\u00df\u0005\'\u0000\u0000\u00df\u00e0\u0003\n\u0005\u0000\u00e0\u00e1"+ - "\u00051\u0000\u0000\u00e1\u00e3\u0001\u0000\u0000\u0000\u00e2\u00db\u0001"+ - "\u0000\u0000\u0000\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00dd\u0001"+ - "\u0000\u0000\u0000\u00e2\u00de\u0001\u0000\u0000\u0000\u00e3\u0013\u0001"+ - "\u0000\u0000\u0000\u00e4\u00e5\u00030\u0018\u0000\u00e5\u00ef\u0005\'"+ - "\u0000\u0000\u00e6\u00f0\u0005<\u0000\u0000\u00e7\u00ec\u0003\n\u0005"+ - "\u0000\u00e8\u00e9\u0005!\u0000\u0000\u00e9\u00eb\u0003\n\u0005\u0000"+ - "\u00ea\u00e8\u0001\u0000\u0000\u0000\u00eb\u00ee\u0001\u0000\u0000\u0000"+ - "\u00ec\u00ea\u0001\u0000\u0000\u0000\u00ec\u00ed\u0001\u0000\u0000\u0000"+ - "\u00ed\u00f0\u0001\u0000\u0000\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000"+ - "\u00ef\u00e6\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000"+ - "\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1\u0001\u0000\u0000\u0000"+ - "\u00f1\u00f2\u00051\u0000\u0000\u00f2\u0015\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f4\u0005\r\u0000\u0000\u00f4\u00f5\u0003\u0018\f\u0000\u00f5\u0017"+ - "\u0001\u0000\u0000\u0000\u00f6\u00fb\u0003\u001a\r\u0000\u00f7\u00f8\u0005"+ - "!\u0000\u0000\u00f8\u00fa\u0003\u001a\r\u0000\u00f9\u00f7\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0019\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u0104\u0003\n\u0005"+ - "\u0000\u00ff\u0100\u0003,\u0016\u0000\u0100\u0101\u0005 
\u0000\u0000\u0101"+ - "\u0102\u0003\n\u0005\u0000\u0102\u0104\u0001\u0000\u0000\u0000\u0103\u00fe"+ - "\u0001\u0000\u0000\u0000\u0103\u00ff\u0001\u0000\u0000\u0000\u0104\u001b"+ - "\u0001\u0000\u0000\u0000\u0105\u0106\u0005\u0006\u0000\u0000\u0106\u010b"+ - "\u0003*\u0015\u0000\u0107\u0108\u0005!\u0000\u0000\u0108\u010a\u0003*"+ - "\u0015\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u010a\u010d\u0001\u0000"+ - "\u0000\u0000\u010b\u0109\u0001\u0000\u0000\u0000\u010b\u010c\u0001\u0000"+ - "\u0000\u0000\u010c\u010f\u0001\u0000\u0000\u0000\u010d\u010b\u0001\u0000"+ - "\u0000\u0000\u010e\u0110\u0003\u001e\u000f\u0000\u010f\u010e\u0001\u0000"+ - "\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001d\u0001\u0000"+ - "\u0000\u0000\u0111\u0114\u0003 \u0010\u0000\u0112\u0114\u0003\"\u0011"+ - "\u0000\u0113\u0111\u0001\u0000\u0000\u0000\u0113\u0112\u0001\u0000\u0000"+ - "\u0000\u0114\u001f\u0001\u0000\u0000\u0000\u0115\u0116\u0005F\u0000\u0000"+ - "\u0116\u011b\u0003*\u0015\u0000\u0117\u0118\u0005!\u0000\u0000\u0118\u011a"+ - "\u0003*\u0015\u0000\u0119\u0117\u0001\u0000\u0000\u0000\u011a\u011d\u0001"+ - "\u0000\u0000\u0000\u011b\u0119\u0001\u0000\u0000\u0000\u011b\u011c\u0001"+ - "\u0000\u0000\u0000\u011c!\u0001\u0000\u0000\u0000\u011d\u011b\u0001\u0000"+ - "\u0000\u0000\u011e\u011f\u0005?\u0000\u0000\u011f\u0120\u0003 \u0010\u0000"+ - "\u0120\u0121\u0005@\u0000\u0000\u0121#\u0001\u0000\u0000\u0000\u0122\u0123"+ - "\u0005\u0004\u0000\u0000\u0123\u0124\u0003\u0018\f\u0000\u0124%\u0001"+ - "\u0000\u0000\u0000\u0125\u0127\u0005\u0010\u0000\u0000\u0126\u0128\u0003"+ - "\u0018\f\u0000\u0127\u0126\u0001\u0000\u0000\u0000\u0127\u0128\u0001\u0000"+ - "\u0000\u0000\u0128\u012b\u0001\u0000\u0000\u0000\u0129\u012a\u0005\u001d"+ - "\u0000\u0000\u012a\u012c\u0003\u0018\f\u0000\u012b\u0129\u0001\u0000\u0000"+ - "\u0000\u012b\u012c\u0001\u0000\u0000\u0000\u012c\'\u0001\u0000\u0000\u0000"+ - "\u012d\u012e\u0005\b\u0000\u0000\u012e\u0131\u0003\u0018\f\u0000\u012f"+ - 
"\u0130\u0005\u001d\u0000\u0000\u0130\u0132\u0003\u0018\f\u0000\u0131\u012f"+ - "\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132)\u0001"+ - "\u0000\u0000\u0000\u0133\u0134\u0007\u0002\u0000\u0000\u0134+\u0001\u0000"+ - "\u0000\u0000\u0135\u013a\u00030\u0018\u0000\u0136\u0137\u0005#\u0000\u0000"+ - "\u0137\u0139\u00030\u0018\u0000\u0138\u0136\u0001\u0000\u0000\u0000\u0139"+ - "\u013c\u0001\u0000\u0000\u0000\u013a\u0138\u0001\u0000\u0000\u0000\u013a"+ - "\u013b\u0001\u0000\u0000\u0000\u013b-\u0001\u0000\u0000\u0000\u013c\u013a"+ - "\u0001\u0000\u0000\u0000\u013d\u0142\u00032\u0019\u0000\u013e\u013f\u0005"+ - "#\u0000\u0000\u013f\u0141\u00032\u0019\u0000\u0140\u013e\u0001\u0000\u0000"+ - "\u0000\u0141\u0144\u0001\u0000\u0000\u0000\u0142\u0140\u0001\u0000\u0000"+ - "\u0000\u0142\u0143\u0001\u0000\u0000\u0000\u0143/\u0001\u0000\u0000\u0000"+ - "\u0144\u0142\u0001\u0000\u0000\u0000\u0145\u0146\u0007\u0003\u0000\u0000"+ - "\u01461\u0001\u0000\u0000\u0000\u0147\u0148\u0005K\u0000\u0000\u01483"+ - "\u0001\u0000\u0000\u0000\u0149\u0174\u0005,\u0000\u0000\u014a\u014b\u0003"+ - "T*\u0000\u014b\u014c\u0005A\u0000\u0000\u014c\u0174\u0001\u0000\u0000"+ - "\u0000\u014d\u0174\u0003R)\u0000\u014e\u0174\u0003T*\u0000\u014f\u0174"+ - "\u0003N\'\u0000\u0150\u0174\u0005/\u0000\u0000\u0151\u0174\u0003V+\u0000"+ - "\u0152\u0153\u0005?\u0000\u0000\u0153\u0158\u0003P(\u0000\u0154\u0155"+ - "\u0005!\u0000\u0000\u0155\u0157\u0003P(\u0000\u0156\u0154\u0001\u0000"+ - "\u0000\u0000\u0157\u015a\u0001\u0000\u0000\u0000\u0158\u0156\u0001\u0000"+ - "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015b\u0001\u0000"+ - "\u0000\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u015c\u0005@\u0000"+ - "\u0000\u015c\u0174\u0001\u0000\u0000\u0000\u015d\u015e\u0005?\u0000\u0000"+ - "\u015e\u0163\u0003N\'\u0000\u015f\u0160\u0005!\u0000\u0000\u0160\u0162"+ - "\u0003N\'\u0000\u0161\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001"+ - 
"\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001"+ - "\u0000\u0000\u0000\u0164\u0166\u0001\u0000\u0000\u0000\u0165\u0163\u0001"+ - "\u0000\u0000\u0000\u0166\u0167\u0005@\u0000\u0000\u0167\u0174\u0001\u0000"+ - "\u0000\u0000\u0168\u0169\u0005?\u0000\u0000\u0169\u016e\u0003V+\u0000"+ - "\u016a\u016b\u0005!\u0000\u0000\u016b\u016d\u0003V+\u0000\u016c\u016a"+ - "\u0001\u0000\u0000\u0000\u016d\u0170\u0001\u0000\u0000\u0000\u016e\u016c"+ - "\u0001\u0000\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0171"+ - "\u0001\u0000\u0000\u0000\u0170\u016e\u0001\u0000\u0000\u0000\u0171\u0172"+ - "\u0005@\u0000\u0000\u0172\u0174\u0001\u0000\u0000\u0000\u0173\u0149\u0001"+ - "\u0000\u0000\u0000\u0173\u014a\u0001\u0000\u0000\u0000\u0173\u014d\u0001"+ - "\u0000\u0000\u0000\u0173\u014e\u0001\u0000\u0000\u0000\u0173\u014f\u0001"+ - "\u0000\u0000\u0000\u0173\u0150\u0001\u0000\u0000\u0000\u0173\u0151\u0001"+ - "\u0000\u0000\u0000\u0173\u0152\u0001\u0000\u0000\u0000\u0173\u015d\u0001"+ - "\u0000\u0000\u0000\u0173\u0168\u0001\u0000\u0000\u0000\u01745\u0001\u0000"+ - "\u0000\u0000\u0175\u0176\u0005\n\u0000\u0000\u0176\u0177\u0005\u001b\u0000"+ - "\u0000\u01777\u0001\u0000\u0000\u0000\u0178\u0179\u0005\u000f\u0000\u0000"+ - "\u0179\u017e\u0003:\u001d\u0000\u017a\u017b\u0005!\u0000\u0000\u017b\u017d"+ - "\u0003:\u001d\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u0180\u0001"+ - "\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017e\u017f\u0001"+ - "\u0000\u0000\u0000\u017f9\u0001\u0000\u0000\u0000\u0180\u017e\u0001\u0000"+ - "\u0000\u0000\u0181\u0183\u0003\n\u0005\u0000\u0182\u0184\u0007\u0004\u0000"+ - "\u0000\u0183\u0182\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000\u0000"+ - "\u0000\u0184\u0187\u0001\u0000\u0000\u0000\u0185\u0186\u0005-\u0000\u0000"+ - "\u0186\u0188\u0007\u0005\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000"+ - "\u0187\u0188\u0001\u0000\u0000\u0000\u0188;\u0001\u0000\u0000\u0000\u0189"+ - 
"\u018a\u0005\t\u0000\u0000\u018a\u018f\u0003.\u0017\u0000\u018b\u018c"+ - "\u0005!\u0000\u0000\u018c\u018e\u0003.\u0017\u0000\u018d\u018b\u0001\u0000"+ - "\u0000\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u018d\u0001\u0000"+ - "\u0000\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u0190=\u0001\u0000\u0000"+ - "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0193\u0005\u0002\u0000"+ - "\u0000\u0193\u0198\u0003.\u0017\u0000\u0194\u0195\u0005!\u0000\u0000\u0195"+ - "\u0197\u0003.\u0017\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0197\u019a"+ - "\u0001\u0000\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0198\u0199"+ - "\u0001\u0000\u0000\u0000\u0199?\u0001\u0000\u0000\u0000\u019a\u0198\u0001"+ - "\u0000\u0000\u0000\u019b\u019c\u0005\f\u0000\u0000\u019c\u01a1\u0003B"+ - "!\u0000\u019d\u019e\u0005!\u0000\u0000\u019e\u01a0\u0003B!\u0000\u019f"+ - "\u019d\u0001\u0000\u0000\u0000\u01a0\u01a3\u0001\u0000\u0000\u0000\u01a1"+ - "\u019f\u0001\u0000\u0000\u0000\u01a1\u01a2\u0001\u0000\u0000\u0000\u01a2"+ - "A\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a5"+ - "\u0003.\u0017\u0000\u01a5\u01a6\u0005O\u0000\u0000\u01a6\u01a7\u0003."+ - "\u0017\u0000\u01a7C\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005\u0001\u0000"+ - "\u0000\u01a9\u01aa\u0003\u0012\t\u0000\u01aa\u01ac\u0003V+\u0000\u01ab"+ - "\u01ad\u0003J%\u0000\u01ac\u01ab\u0001\u0000\u0000\u0000\u01ac\u01ad\u0001"+ - "\u0000\u0000\u0000\u01adE\u0001\u0000\u0000\u0000\u01ae\u01af\u0005\u0007"+ - "\u0000\u0000\u01af\u01b0\u0003\u0012\t\u0000\u01b0\u01b1\u0003V+\u0000"+ - "\u01b1G\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005\u000b\u0000\u0000\u01b3"+ - "\u01b4\u0003,\u0016\u0000\u01b4I\u0001\u0000\u0000\u0000\u01b5\u01ba\u0003"+ - "L&\u0000\u01b6\u01b7\u0005!\u0000\u0000\u01b7\u01b9\u0003L&\u0000\u01b8"+ - "\u01b6\u0001\u0000\u0000\u0000\u01b9\u01bc\u0001\u0000\u0000\u0000\u01ba"+ - "\u01b8\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb"+ - 
"K\u0001\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01be"+ - "\u00030\u0018\u0000\u01be\u01bf\u0005 \u0000\u0000\u01bf\u01c0\u00034"+ - "\u001a\u0000\u01c0M\u0001\u0000\u0000\u0000\u01c1\u01c2\u0007\u0006\u0000"+ - "\u0000\u01c2O\u0001\u0000\u0000\u0000\u01c3\u01c6\u0003R)\u0000\u01c4"+ - "\u01c6\u0003T*\u0000\u01c5\u01c3\u0001\u0000\u0000\u0000\u01c5\u01c4\u0001"+ - "\u0000\u0000\u0000\u01c6Q\u0001\u0000\u0000\u0000\u01c7\u01c9\u0007\u0000"+ - "\u0000\u0000\u01c8\u01c7\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca\u01cb\u0005\u001c"+ - "\u0000\u0000\u01cbS\u0001\u0000\u0000\u0000\u01cc\u01ce\u0007\u0000\u0000"+ - "\u0000\u01cd\u01cc\u0001\u0000\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ - "\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005\u001b\u0000"+ - "\u0000\u01d0U\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005\u001a\u0000\u0000"+ - "\u01d2W\u0001\u0000\u0000\u0000\u01d3\u01d4\u0007\u0007\u0000\u0000\u01d4"+ - "Y\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005\u0005\u0000\u0000\u01d6\u01d7"+ - "\u0003\\.\u0000\u01d7[\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005?\u0000"+ - "\u0000\u01d9\u01da\u0003\u0002\u0001\u0000\u01da\u01db\u0005@\u0000\u0000"+ - "\u01db]\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000\u01dd"+ - "\u01e1\u0005_\u0000\u0000\u01de\u01df\u0005\u000e\u0000\u0000\u01df\u01e1"+ - "\u0005`\u0000\u0000\u01e0\u01dc\u0001\u0000\u0000\u0000\u01e0\u01de\u0001"+ - "\u0000\u0000\u0000\u01e1_\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\u0003"+ - "\u0000\u0000\u01e3\u01e6\u0005U\u0000\u0000\u01e4\u01e5\u0005S\u0000\u0000"+ - "\u01e5\u01e7\u0003.\u0017\u0000\u01e6\u01e4\u0001\u0000\u0000\u0000\u01e6"+ - "\u01e7\u0001\u0000\u0000\u0000\u01e7\u01f1\u0001\u0000\u0000\u0000\u01e8"+ - "\u01e9\u0005T\u0000\u0000\u01e9\u01ee\u0003b1\u0000\u01ea\u01eb\u0005"+ - "!\u0000\u0000\u01eb\u01ed\u0003b1\u0000\u01ec\u01ea\u0001\u0000\u0000"+ - 
"\u0000\u01ed\u01f0\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000"+ - "\u0000\u01ee\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f2\u0001\u0000\u0000"+ - "\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000\u01f1\u01e8\u0001\u0000\u0000"+ - "\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2a\u0001\u0000\u0000\u0000"+ - "\u01f3\u01f4\u0003.\u0017\u0000\u01f4\u01f5\u0005 \u0000\u0000\u01f5\u01f7"+ - "\u0001\u0000\u0000\u0000\u01f6\u01f3\u0001\u0000\u0000\u0000\u01f6\u01f7"+ - "\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9"+ - "\u0003.\u0017\u0000\u01f9c\u0001\u0000\u0000\u00002ov\u0085\u0091\u009a"+ - "\u00a2\u00a6\u00ae\u00b0\u00b5\u00bc\u00c1\u00c8\u00ce\u00d6\u00d8\u00e2"+ - "\u00ec\u00ef\u00fb\u0103\u010b\u010f\u0113\u011b\u0127\u012b\u0131\u013a"+ - "\u0142\u0158\u0163\u016e\u0173\u017e\u0183\u0187\u018f\u0198\u01a1\u01ac"+ - "\u01ba\u01c5\u01c8\u01cd\u01e0\u01e6\u01ee\u01f1\u01f6"; + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bd\u0000\b\u0001\u0000;<\u0001"+ + "\u0000=?\u0002\u0000CCHH\u0001\u0000BC\u0002\u0000 ##\u0001\u0000&\'"+ + "\u0002\u0000%%33\u0002\u0000446:\u0217\u0000f\u0001\u0000\u0000\u0000"+ + "\u0002i\u0001\u0000\u0000\u0000\u0004y\u0001\u0000\u0000\u0000\u0006\u0088"+ + "\u0001\u0000\u0000\u0000\b\u008a\u0001\u0000\u0000\u0000\n\u00a9\u0001"+ + "\u0000\u0000\u0000\f\u00c4\u0001\u0000\u0000\u0000\u000e\u00cb\u0001\u0000"+ + "\u0000\u0000\u0010\u00d1\u0001\u0000\u0000\u0000\u0012\u00e5\u0001\u0000"+ + "\u0000\u0000\u0014\u00e7\u0001\u0000\u0000\u0000\u0016\u00f6\u0001\u0000"+ + "\u0000\u0000\u0018\u00f9\u0001\u0000\u0000\u0000\u001a\u0106\u0001\u0000"+ + "\u0000\u0000\u001c\u0108\u0001\u0000\u0000\u0000\u001e\u0116\u0001\u0000"+ + "\u0000\u0000 \u0118\u0001\u0000\u0000\u0000\"\u0121\u0001\u0000\u0000"+ + "\u0000$\u0125\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000("+ + "\u0130\u0001\u0000\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u0138\u0001"+ + 
"\u0000\u0000\u0000.\u0140\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000"+ + "\u00002\u014a\u0001\u0000\u0000\u00004\u0176\u0001\u0000\u0000\u00006"+ + "\u0178\u0001\u0000\u0000\u00008\u017b\u0001\u0000\u0000\u0000:\u0184\u0001"+ + "\u0000\u0000\u0000<\u018c\u0001\u0000\u0000\u0000>\u0195\u0001\u0000\u0000"+ + "\u0000@\u019e\u0001\u0000\u0000\u0000B\u01a7\u0001\u0000\u0000\u0000D"+ + "\u01ab\u0001\u0000\u0000\u0000F\u01b1\u0001\u0000\u0000\u0000H\u01b5\u0001"+ + "\u0000\u0000\u0000J\u01b8\u0001\u0000\u0000\u0000L\u01c0\u0001\u0000\u0000"+ + "\u0000N\u01c4\u0001\u0000\u0000\u0000P\u01c8\u0001\u0000\u0000\u0000R"+ + "\u01cb\u0001\u0000\u0000\u0000T\u01d0\u0001\u0000\u0000\u0000V\u01d4\u0001"+ + "\u0000\u0000\u0000X\u01d6\u0001\u0000\u0000\u0000Z\u01d8\u0001\u0000\u0000"+ + "\u0000\\\u01db\u0001\u0000\u0000\u0000^\u01df\u0001\u0000\u0000\u0000"+ + "`\u01e2\u0001\u0000\u0000\u0000b\u01e5\u0001\u0000\u0000\u0000d\u01f9"+ + "\u0001\u0000\u0000\u0000fg\u0003\u0002\u0001\u0000gh\u0005\u0000\u0000"+ + "\u0001h\u0001\u0001\u0000\u0000\u0000ij\u0006\u0001\uffff\uffff\u0000"+ + "jk\u0003\u0004\u0002\u0000kq\u0001\u0000\u0000\u0000lm\n\u0001\u0000\u0000"+ + "mn\u0005\u001a\u0000\u0000np\u0003\u0006\u0003\u0000ol\u0001\u0000\u0000"+ + "\u0000ps\u0001\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000qr\u0001\u0000"+ + "\u0000\u0000r\u0003\u0001\u0000\u0000\u0000sq\u0001\u0000\u0000\u0000"+ + "tz\u0003Z-\u0000uz\u0003\u001c\u000e\u0000vz\u0003\u0016\u000b\u0000w"+ + "z\u0003^/\u0000xz\u0003`0\u0000yt\u0001\u0000\u0000\u0000yu\u0001\u0000"+ + "\u0000\u0000yv\u0001\u0000\u0000\u0000yw\u0001\u0000\u0000\u0000yx\u0001"+ + "\u0000\u0000\u0000z\u0005\u0001\u0000\u0000\u0000{\u0089\u0003$\u0012"+ + "\u0000|\u0089\u0003(\u0014\u0000}\u0089\u00036\u001b\u0000~\u0089\u0003"+ + "<\u001e\u0000\u007f\u0089\u00038\u001c\u0000\u0080\u0089\u0003&\u0013"+ + "\u0000\u0081\u0089\u0003\b\u0004\u0000\u0082\u0089\u0003>\u001f\u0000"+ + "\u0083\u0089\u0003@ 
\u0000\u0084\u0089\u0003D\"\u0000\u0085\u0089\u0003"+ + "F#\u0000\u0086\u0089\u0003b1\u0000\u0087\u0089\u0003H$\u0000\u0088{\u0001"+ + "\u0000\u0000\u0000\u0088|\u0001\u0000\u0000\u0000\u0088}\u0001\u0000\u0000"+ + "\u0000\u0088~\u0001\u0000\u0000\u0000\u0088\u007f\u0001\u0000\u0000\u0000"+ + "\u0088\u0080\u0001\u0000\u0000\u0000\u0088\u0081\u0001\u0000\u0000\u0000"+ + "\u0088\u0082\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000"+ + "\u0088\u0084\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000\u0000\u0000"+ + "\u0088\u0086\u0001\u0000\u0000\u0000\u0088\u0087\u0001\u0000\u0000\u0000"+ + "\u0089\u0007\u0001\u0000\u0000\u0000\u008a\u008b\u0005\u0012\u0000\u0000"+ + "\u008b\u008c\u0003\n\u0005\u0000\u008c\t\u0001\u0000\u0000\u0000\u008d"+ + "\u008e\u0006\u0005\uffff\uffff\u0000\u008e\u008f\u0005,\u0000\u0000\u008f"+ + "\u00aa\u0003\n\u0005\u0007\u0090\u00aa\u0003\u000e\u0007\u0000\u0091\u00aa"+ + "\u0003\f\u0006\u0000\u0092\u0094\u0003\u000e\u0007\u0000\u0093\u0095\u0005"+ + ",\u0000\u0000\u0094\u0093\u0001\u0000\u0000\u0000\u0094\u0095\u0001\u0000"+ + "\u0000\u0000\u0095\u0096\u0001\u0000\u0000\u0000\u0096\u0097\u0005)\u0000"+ + "\u0000\u0097\u0098\u0005(\u0000\u0000\u0098\u009d\u0003\u000e\u0007\u0000"+ + "\u0099\u009a\u0005\"\u0000\u0000\u009a\u009c\u0003\u000e\u0007\u0000\u009b"+ + "\u0099\u0001\u0000\u0000\u0000\u009c\u009f\u0001\u0000\u0000\u0000\u009d"+ + "\u009b\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e"+ + "\u00a0\u0001\u0000\u0000\u0000\u009f\u009d\u0001\u0000\u0000\u0000\u00a0"+ + "\u00a1\u00052\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000\u00a2\u00a3"+ + "\u0003\u000e\u0007\u0000\u00a3\u00a5\u0005*\u0000\u0000\u00a4\u00a6\u0005"+ + ",\u0000\u0000\u00a5\u00a4\u0001\u0000\u0000\u0000\u00a5\u00a6\u0001\u0000"+ + "\u0000\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a8\u0005-\u0000"+ + "\u0000\u00a8\u00aa\u0001\u0000\u0000\u0000\u00a9\u008d\u0001\u0000\u0000"+ + 
"\u0000\u00a9\u0090\u0001\u0000\u0000\u0000\u00a9\u0091\u0001\u0000\u0000"+ + "\u0000\u00a9\u0092\u0001\u0000\u0000\u0000\u00a9\u00a2\u0001\u0000\u0000"+ + "\u0000\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\n\u0004\u0000\u0000"+ + "\u00ac\u00ad\u0005\u001f\u0000\u0000\u00ad\u00b2\u0003\n\u0005\u0005\u00ae"+ + "\u00af\n\u0003\u0000\u0000\u00af\u00b0\u0005/\u0000\u0000\u00b0\u00b2"+ + "\u0003\n\u0005\u0004\u00b1\u00ab\u0001\u0000\u0000\u0000\u00b1\u00ae\u0001"+ + "\u0000\u0000\u0000\u00b2\u00b5\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001"+ + "\u0000\u0000\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u000b\u0001"+ + "\u0000\u0000\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b8\u0003"+ + "\u000e\u0007\u0000\u00b7\u00b9\u0005,\u0000\u0000\u00b8\u00b7\u0001\u0000"+ + "\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000"+ + "\u0000\u0000\u00ba\u00bb\u0005+\u0000\u0000\u00bb\u00bc\u0003V+\u0000"+ + "\u00bc\u00c5\u0001\u0000\u0000\u0000\u00bd\u00bf\u0003\u000e\u0007\u0000"+ + "\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf"+ + "\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1"+ + "\u00c2\u00051\u0000\u0000\u00c2\u00c3\u0003V+\u0000\u00c3\u00c5\u0001"+ + "\u0000\u0000\u0000\u00c4\u00b6\u0001\u0000\u0000\u0000\u00c4\u00bd\u0001"+ + "\u0000\u0000\u0000\u00c5\r\u0001\u0000\u0000\u0000\u00c6\u00cc\u0003\u0010"+ + "\b\u0000\u00c7\u00c8\u0003\u0010\b\u0000\u00c8\u00c9\u0003X,\u0000\u00c9"+ + "\u00ca\u0003\u0010\b\u0000\u00ca\u00cc\u0001\u0000\u0000\u0000\u00cb\u00c6"+ + "\u0001\u0000\u0000\u0000\u00cb\u00c7\u0001\u0000\u0000\u0000\u00cc\u000f"+ + "\u0001\u0000\u0000\u0000\u00cd\u00ce\u0006\b\uffff\uffff\u0000\u00ce\u00d2"+ + "\u0003\u0012\t\u0000\u00cf\u00d0\u0007\u0000\u0000\u0000\u00d0\u00d2\u0003"+ + "\u0010\b\u0003\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d1\u00cf\u0001\u0000"+ + "\u0000\u0000\u00d2\u00db\u0001\u0000\u0000\u0000\u00d3\u00d4\n\u0002\u0000"+ + 
"\u0000\u00d4\u00d5\u0007\u0001\u0000\u0000\u00d5\u00da\u0003\u0010\b\u0003"+ + "\u00d6\u00d7\n\u0001\u0000\u0000\u00d7\u00d8\u0007\u0000\u0000\u0000\u00d8"+ + "\u00da\u0003\u0010\b\u0002\u00d9\u00d3\u0001\u0000\u0000\u0000\u00d9\u00d6"+ + "\u0001\u0000\u0000\u0000\u00da\u00dd\u0001\u0000\u0000\u0000\u00db\u00d9"+ + "\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000\u00dc\u0011"+ + "\u0001\u0000\u0000\u0000\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e6"+ + "\u00034\u001a\u0000\u00df\u00e6\u0003,\u0016\u0000\u00e0\u00e6\u0003\u0014"+ + "\n\u0000\u00e1\u00e2\u0005(\u0000\u0000\u00e2\u00e3\u0003\n\u0005\u0000"+ + "\u00e3\u00e4\u00052\u0000\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5"+ + "\u00de\u0001\u0000\u0000\u0000\u00e5\u00df\u0001\u0000\u0000\u0000\u00e5"+ + "\u00e0\u0001\u0000\u0000\u0000\u00e5\u00e1\u0001\u0000\u0000\u0000\u00e6"+ + "\u0013\u0001\u0000\u0000\u0000\u00e7\u00e8\u00030\u0018\u0000\u00e8\u00f2"+ + "\u0005(\u0000\u0000\u00e9\u00f3\u0005=\u0000\u0000\u00ea\u00ef\u0003\n"+ + "\u0005\u0000\u00eb\u00ec\u0005\"\u0000\u0000\u00ec\u00ee\u0003\n\u0005"+ + "\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f1\u0001\u0000\u0000"+ + "\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000"+ + "\u0000\u00f0\u00f3\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000"+ + "\u0000\u00f2\u00e9\u0001\u0000\u0000\u0000\u00f2\u00ea\u0001\u0000\u0000"+ + "\u0000\u00f2\u00f3\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001\u0000\u0000"+ + "\u0000\u00f4\u00f5\u00052\u0000\u0000\u00f5\u0015\u0001\u0000\u0000\u0000"+ + "\u00f6\u00f7\u0005\u000e\u0000\u0000\u00f7\u00f8\u0003\u0018\f\u0000\u00f8"+ + "\u0017\u0001\u0000\u0000\u0000\u00f9\u00fe\u0003\u001a\r\u0000\u00fa\u00fb"+ + "\u0005\"\u0000\u0000\u00fb\u00fd\u0003\u001a\r\u0000\u00fc\u00fa\u0001"+ + "\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001"+ + "\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000\u0000\u00ff\u0019\u0001"+ + 
"\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000\u0000\u0101\u0107\u0003"+ + "\n\u0005\u0000\u0102\u0103\u0003,\u0016\u0000\u0103\u0104\u0005!\u0000"+ + "\u0000\u0104\u0105\u0003\n\u0005\u0000\u0105\u0107\u0001\u0000\u0000\u0000"+ + "\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000"+ + "\u0107\u001b\u0001\u0000\u0000\u0000\u0108\u0109\u0005\u0006\u0000\u0000"+ + "\u0109\u010e\u0003*\u0015\u0000\u010a\u010b\u0005\"\u0000\u0000\u010b"+ + "\u010d\u0003*\u0015\u0000\u010c\u010a\u0001\u0000\u0000\u0000\u010d\u0110"+ + "\u0001\u0000\u0000\u0000\u010e\u010c\u0001\u0000\u0000\u0000\u010e\u010f"+ + "\u0001\u0000\u0000\u0000\u010f\u0112\u0001\u0000\u0000\u0000\u0110\u010e"+ + "\u0001\u0000\u0000\u0000\u0111\u0113\u0003\u001e\u000f\u0000\u0112\u0111"+ + "\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001d"+ + "\u0001\u0000\u0000\u0000\u0114\u0117\u0003 \u0010\u0000\u0115\u0117\u0003"+ + "\"\u0011\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0115\u0001\u0000"+ + "\u0000\u0000\u0117\u001f\u0001\u0000\u0000\u0000\u0118\u0119\u0005G\u0000"+ + "\u0000\u0119\u011e\u0003*\u0015\u0000\u011a\u011b\u0005\"\u0000\u0000"+ + "\u011b\u011d\u0003*\u0015\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d"+ + "\u0120\u0001\u0000\u0000\u0000\u011e\u011c\u0001\u0000\u0000\u0000\u011e"+ + "\u011f\u0001\u0000\u0000\u0000\u011f!\u0001\u0000\u0000\u0000\u0120\u011e"+ + "\u0001\u0000\u0000\u0000\u0121\u0122\u0005@\u0000\u0000\u0122\u0123\u0003"+ + " \u0010\u0000\u0123\u0124\u0005A\u0000\u0000\u0124#\u0001\u0000\u0000"+ + "\u0000\u0125\u0126\u0005\u0004\u0000\u0000\u0126\u0127\u0003\u0018\f\u0000"+ + "\u0127%\u0001\u0000\u0000\u0000\u0128\u012a\u0005\u0011\u0000\u0000\u0129"+ + "\u012b\u0003\u0018\f\u0000\u012a\u0129\u0001\u0000\u0000\u0000\u012a\u012b"+ + "\u0001\u0000\u0000\u0000\u012b\u012e\u0001\u0000\u0000\u0000\u012c\u012d"+ + "\u0005\u001e\u0000\u0000\u012d\u012f\u0003\u0018\f\u0000\u012e\u012c\u0001"+ + 
"\u0000\u0000\u0000\u012e\u012f\u0001\u0000\u0000\u0000\u012f\'\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0005\b\u0000\u0000\u0131\u0134\u0003\u0018\f"+ + "\u0000\u0132\u0133\u0005\u001e\u0000\u0000\u0133\u0135\u0003\u0018\f\u0000"+ + "\u0134\u0132\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000"+ + "\u0135)\u0001\u0000\u0000\u0000\u0136\u0137\u0007\u0002\u0000\u0000\u0137"+ + "+\u0001\u0000\u0000\u0000\u0138\u013d\u00030\u0018\u0000\u0139\u013a\u0005"+ + "$\u0000\u0000\u013a\u013c\u00030\u0018\u0000\u013b\u0139\u0001\u0000\u0000"+ + "\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000"+ + "\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e-\u0001\u0000\u0000\u0000"+ + "\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0145\u00032\u0019\u0000\u0141"+ + "\u0142\u0005$\u0000\u0000\u0142\u0144\u00032\u0019\u0000\u0143\u0141\u0001"+ + "\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143\u0001"+ + "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146/\u0001\u0000"+ + "\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u0149\u0007\u0003"+ + "\u0000\u0000\u01491\u0001\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000"+ + "\u014b3\u0001\u0000\u0000\u0000\u014c\u0177\u0005-\u0000\u0000\u014d\u014e"+ + "\u0003T*\u0000\u014e\u014f\u0005B\u0000\u0000\u014f\u0177\u0001\u0000"+ + "\u0000\u0000\u0150\u0177\u0003R)\u0000\u0151\u0177\u0003T*\u0000\u0152"+ + "\u0177\u0003N\'\u0000\u0153\u0177\u00050\u0000\u0000\u0154\u0177\u0003"+ + "V+\u0000\u0155\u0156\u0005@\u0000\u0000\u0156\u015b\u0003P(\u0000\u0157"+ + "\u0158\u0005\"\u0000\u0000\u0158\u015a\u0003P(\u0000\u0159\u0157\u0001"+ + "\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000\u0000\u015b\u0159\u0001"+ + "\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000\u0000\u015c\u015e\u0001"+ + "\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u015f\u0005"+ + "A\u0000\u0000\u015f\u0177\u0001\u0000\u0000\u0000\u0160\u0161\u0005@\u0000"+ + 
"\u0000\u0161\u0166\u0003N\'\u0000\u0162\u0163\u0005\"\u0000\u0000\u0163"+ + "\u0165\u0003N\'\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0165\u0168"+ + "\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0166\u0167"+ + "\u0001\u0000\u0000\u0000\u0167\u0169\u0001\u0000\u0000\u0000\u0168\u0166"+ + "\u0001\u0000\u0000\u0000\u0169\u016a\u0005A\u0000\u0000\u016a\u0177\u0001"+ + "\u0000\u0000\u0000\u016b\u016c\u0005@\u0000\u0000\u016c\u0171\u0003V+"+ + "\u0000\u016d\u016e\u0005\"\u0000\u0000\u016e\u0170\u0003V+\u0000\u016f"+ + "\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000\u0000\u0171"+ + "\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000\u0172"+ + "\u0174\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000\u0174"+ + "\u0175\u0005A\u0000\u0000\u0175\u0177\u0001\u0000\u0000\u0000\u0176\u014c"+ + "\u0001\u0000\u0000\u0000\u0176\u014d\u0001\u0000\u0000\u0000\u0176\u0150"+ + "\u0001\u0000\u0000\u0000\u0176\u0151\u0001\u0000\u0000\u0000\u0176\u0152"+ + "\u0001\u0000\u0000\u0000\u0176\u0153\u0001\u0000\u0000\u0000\u0176\u0154"+ + "\u0001\u0000\u0000\u0000\u0176\u0155\u0001\u0000\u0000\u0000\u0176\u0160"+ + "\u0001\u0000\u0000\u0000\u0176\u016b\u0001\u0000\u0000\u0000\u01775\u0001"+ + "\u0000\u0000\u0000\u0178\u0179\u0005\n\u0000\u0000\u0179\u017a\u0005\u001c"+ + "\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b\u017c\u0005\u0010\u0000"+ + "\u0000\u017c\u0181\u0003:\u001d\u0000\u017d\u017e\u0005\"\u0000\u0000"+ + "\u017e\u0180\u0003:\u001d\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180"+ + "\u0183\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0181"+ + "\u0182\u0001\u0000\u0000\u0000\u01829\u0001\u0000\u0000\u0000\u0183\u0181"+ + "\u0001\u0000\u0000\u0000\u0184\u0186\u0003\n\u0005\u0000\u0185\u0187\u0007"+ + "\u0004\u0000\u0000\u0186\u0185\u0001\u0000\u0000\u0000\u0186\u0187\u0001"+ + "\u0000\u0000\u0000\u0187\u018a\u0001\u0000\u0000\u0000\u0188\u0189\u0005"+ + 
".\u0000\u0000\u0189\u018b\u0007\u0005\u0000\u0000\u018a\u0188\u0001\u0000"+ + "\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001\u0000\u0000"+ + "\u0000\u018c\u018d\u0005\t\u0000\u0000\u018d\u0192\u0003.\u0017\u0000"+ + "\u018e\u018f\u0005\"\u0000\u0000\u018f\u0191\u0003.\u0017\u0000\u0190"+ + "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ + "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + "=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196"+ + "\u0005\u0002\u0000\u0000\u0196\u019b\u0003.\u0017\u0000\u0197\u0198\u0005"+ + "\"\u0000\u0000\u0198\u019a\u0003.\u0017\u0000\u0199\u0197\u0001\u0000"+ + "\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000"+ + "\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c?\u0001\u0000\u0000"+ + "\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e\u019f\u0005\r\u0000\u0000"+ + "\u019f\u01a4\u0003B!\u0000\u01a0\u01a1\u0005\"\u0000\u0000\u01a1\u01a3"+ + "\u0003B!\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a3\u01a6\u0001\u0000"+ + "\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000"+ + "\u0000\u0000\u01a5A\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000"+ + "\u0000\u01a7\u01a8\u0003.\u0017\u0000\u01a8\u01a9\u0005P\u0000\u0000\u01a9"+ + "\u01aa\u0003.\u0017\u0000\u01aaC\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005"+ + "\u0001\u0000\u0000\u01ac\u01ad\u0003\u0012\t\u0000\u01ad\u01af\u0003V"+ + "+\u0000\u01ae\u01b0\u0003J%\u0000\u01af\u01ae\u0001\u0000\u0000\u0000"+ + "\u01af\u01b0\u0001\u0000\u0000\u0000\u01b0E\u0001\u0000\u0000\u0000\u01b1"+ + "\u01b2\u0005\u0007\u0000\u0000\u01b2\u01b3\u0003\u0012\t\u0000\u01b3\u01b4"+ + "\u0003V+\u0000\u01b4G\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005\f\u0000"+ + "\u0000\u01b6\u01b7\u0003,\u0016\u0000\u01b7I\u0001\u0000\u0000\u0000\u01b8"+ + "\u01bd\u0003L&\u0000\u01b9\u01ba\u0005\"\u0000\u0000\u01ba\u01bc\u0003"+ + 
"L&\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001\u0000\u0000"+ + "\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001\u0000\u0000"+ + "\u0000\u01beK\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000"+ + "\u01c0\u01c1\u00030\u0018\u0000\u01c1\u01c2\u0005!\u0000\u0000\u01c2\u01c3"+ + "\u00034\u001a\u0000\u01c3M\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u0006"+ + "\u0000\u0000\u01c5O\u0001\u0000\u0000\u0000\u01c6\u01c9\u0003R)\u0000"+ + "\u01c7\u01c9\u0003T*\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c8\u01c7"+ + "\u0001\u0000\u0000\u0000\u01c9Q\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007"+ + "\u0000\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001"+ + "\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ + "\u001d\u0000\u0000\u01ceS\u0001\u0000\u0000\u0000\u01cf\u01d1\u0007\u0000"+ + "\u0000\u0000\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u001c"+ + "\u0000\u0000\u01d3U\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005\u001b\u0000"+ + "\u0000\u01d5W\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0007\u0000\u0000"+ + "\u01d7Y\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005\u0005\u0000\u0000\u01d9"+ + "\u01da\u0003\\.\u0000\u01da[\u0001\u0000\u0000\u0000\u01db\u01dc\u0005"+ + "@\u0000\u0000\u01dc\u01dd\u0003\u0002\u0001\u0000\u01dd\u01de\u0005A\u0000"+ + "\u0000\u01de]\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u000f\u0000\u0000"+ + "\u01e0\u01e1\u0005`\u0000\u0000\u01e1_\u0001\u0000\u0000\u0000\u01e2\u01e3"+ + "\u0005\u000b\u0000\u0000\u01e3\u01e4\u0005d\u0000\u0000\u01e4a\u0001\u0000"+ + "\u0000\u0000\u01e5\u01e6\u0005\u0003\u0000\u0000\u01e6\u01e9\u0005V\u0000"+ + "\u0000\u01e7\u01e8\u0005T\u0000\u0000\u01e8\u01ea\u0003.\u0017\u0000\u01e9"+ + "\u01e7\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea"+ + "\u01f4\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005U\u0000\u0000\u01ec\u01f1"+ + 
"\u0003d2\u0000\u01ed\u01ee\u0005\"\u0000\u0000\u01ee\u01f0\u0003d2\u0000"+ + "\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000"+ + "\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000"+ + "\u01f4\u01eb\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ + "\u01f5c\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003.\u0017\u0000\u01f7\u01f8"+ + "\u0005!\u0000\u0000\u01f8\u01fa\u0001\u0000\u0000\u0000\u01f9\u01f6\u0001"+ + "\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001"+ + "\u0000\u0000\u0000\u01fb\u01fc\u0003.\u0017\u0000\u01fce\u0001\u0000\u0000"+ + "\u00001qy\u0088\u0094\u009d\u00a5\u00a9\u00b1\u00b3\u00b8\u00bf\u00c4"+ + "\u00cb\u00d1\u00d9\u00db\u00e5\u00ef\u00f2\u00fe\u0106\u010e\u0112\u0116"+ + "\u011e\u012a\u012e\u0134\u013d\u0145\u015b\u0166\u0171\u0176\u0181\u0186"+ + "\u018a\u0192\u019b\u01a4\u01af\u01bd\u01c8\u01cb\u01d0\u01e9\u01f1\u01f4"+ + "\u01f9"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index ffbcfc57a90ea..6737e782025b2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -845,13 +845,13 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *

    The default implementation does nothing.

    */ - @Override public void enterShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { } + @Override public void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } /** * {@inheritDoc} * *

    The default implementation does nothing.

    */ - @Override public void exitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { } + @Override public void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index ca876efb3e7da..95502053521d6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -501,7 +501,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

    The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

    */ - @Override public T visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { return visitChildren(ctx); } + @Override public T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 75656c1df76dc..433eba1a14999 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -754,17 +754,17 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitShowInfo(EsqlBaseParser.ShowInfoContext ctx); /** - * Enter a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Enter a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree */ - void enterShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** - * Exit a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Exit a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree */ - void exitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#enrichCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 0c3cc791f7fe2..323eb46a42fda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -453,12 +453,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitShowInfo(EsqlBaseParser.ShowInfoContext ctx); /** - * Visit a parse tree produced by the {@code showFunctions} - * labeled alternative in {@link EsqlBaseParser#showCommand}. + * Visit a parse tree produced by the {@code metaFunctions} + * labeled alternative in {@link EsqlBaseParser#metaCommand}. * @param ctx the parse tree * @return the visitor result */ - T visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx); + T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#enrichCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 8f4f942551002..5ae0584d28a44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -315,8 +315,8 @@ public LogicalPlan visitShowInfo(EsqlBaseParser.ShowInfoContext ctx) { } @Override - public LogicalPlan visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { - return new ShowFunctions(source(ctx)); + public LogicalPlan visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { + return new MetaFunctions(source(ctx)); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java similarity index 96% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java index 5a4b90c45f23d..55508a7704346 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/show/ShowFunctions.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.plan.logical.show; +package org.elasticsearch.xpack.esql.plan.logical.meta; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; @@ -28,11 +28,11 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; -public class ShowFunctions extends LeafPlan { +public class MetaFunctions extends LeafPlan { private final List attributes; - public ShowFunctions(Source source) { + public MetaFunctions(Source source) { super(source); attributes = new ArrayList<>(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index fd0801d35958d..516c88b5f6526 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -85,8 +85,8 @@ public PhysicalPlan map(LogicalPlan p) { } // Commands - if (p instanceof ShowFunctions showFunctions) { - return new ShowExec(showFunctions.source(), showFunctions.output(), showFunctions.values(functionRegistry)); + if (p instanceof MetaFunctions metaFunctions) { + return new ShowExec(metaFunctions.source(), 
metaFunctions.output(), metaFunctions.values(functionRegistry)); } if (p instanceof ShowInfo showInfo) { return new ShowExec(showInfo.source(), showInfo.output(), showInfo.values()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index d30aff3139495..dd1d9cffeeff1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; -import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; @@ -43,12 +43,13 @@ public enum FeatureMetric { WHERE(Filter.class::isInstance), ENRICH(Enrich.class::isInstance), MV_EXPAND(MvExpand.class::isInstance), - SHOW(plan -> plan instanceof ShowInfo || plan instanceof ShowFunctions), + SHOW(ShowInfo.class::isInstance), ROW(Row.class::isInstance), FROM(EsRelation.class::isInstance), DROP(Drop.class::isInstance), KEEP(Keep.class::isInstance), - RENAME(Rename.class::isInstance); + RENAME(Rename.class::isInstance), + META(MetaFunctions.class::isInstance); private Predicate planCheck; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index b1f8a4ed2f07d..f8607a101a93c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -583,7 +583,8 @@ public void testDeprecatedIsNullFunction() { public void testMetadataFieldOnOtherSources() { expectError("row a = 1 metadata _index", "line 1:20: extraneous input '_index' expecting "); - expectError("show functions metadata _index", "line 1:16: token recognition error at: 'm'"); + expectError("meta functions metadata _index", "line 1:16: token recognition error at: 'm'"); + expectError("show info metadata _index", "line 1:11: token recognition error at: 'm'"); expectError( "explain [from foo] metadata _index", "line 1:20: mismatched input 'metadata' expecting {'|', ',', OPENING_BRACKET, ']', 'metadata'}" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 6dc15d67e0560..ab004a3a055ce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -24,6 +24,7 @@ import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.META; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; @@ -54,6 +55,7 @@ public void testDissectQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testEvalQuery() { @@ -73,6 +75,7 @@ public void testEvalQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, 
rename(c)); + assertEquals(0, meta(c)); } public void testGrokQuery() { @@ -92,6 +95,7 @@ public void testGrokQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testLimitQuery() { @@ -111,6 +115,7 @@ public void testLimitQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testSortQuery() { @@ -130,6 +135,7 @@ public void testSortQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testStatsQuery() { @@ -149,6 +155,7 @@ public void testStatsQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testWhereQuery() { @@ -168,6 +175,7 @@ public void testWhereQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testTwoWhereQuery() { @@ -187,6 +195,7 @@ public void testTwoWhereQuery() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testTwoQueriesExecuted() { @@ -226,6 +235,7 @@ public void testTwoQueriesExecuted() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testEnrich() { @@ -251,6 +261,7 @@ public void testEnrich() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testMvExpand() { @@ -279,11 +290,31 @@ public void testMvExpand() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } - public void testShowFunctionsOrInfo() { - String showCommand = randomFrom("show functions", "show info"); - Counters c = esql(showCommand + " | stats a = count(*), b = count(*), c = count(*) | mv_expand c"); + public 
void testMetaFunctions() { + Counters c = esql("meta functions | stats a = count(*) | mv_expand a"); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(0, sort(c)); + assertEquals(1L, stats(c)); + assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(1L, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(0, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); + assertEquals(1L, meta(c)); + } + + public void testShowInfo() { + Counters c = esql("show info | stats a = count(*), b = count(*), c = count(*) | mv_expand c"); assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); @@ -299,6 +330,7 @@ public void testShowFunctionsOrInfo() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testRow() { @@ -318,6 +350,7 @@ public void testRow() { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } public void testDropAndRename() { @@ -337,6 +370,7 @@ public void testDropAndRename() { assertEquals(1L, drop(c)); assertEquals(0, keep(c)); assertEquals(1L, rename(c)); + assertEquals(0, meta(c)); } public void testKeep() { @@ -361,6 +395,7 @@ public void testKeep() { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); + assertEquals(0, meta(c)); } private long dissect(Counters c) { @@ -423,6 +458,10 @@ private long rename(Counters c) { return c.get(FPREFIX + RENAME); } + private long meta(Counters c) { + return c.get(FPREFIX + META); + } + private Counters esql(String esql) { return esql(esql, null); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 0e2838c976799..67dc4bbf2a06a 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 8.11.99" - reason: "Latest, more complete, telemetry available in 8.12+" + version: " - 8.13.99" + reason: "Introduction of META tracking in 8.14+" - do: indices.create: @@ -23,7 +23,7 @@ setup: - do: {xpack.usage: {}} - match: { esql.available: true } - match: { esql.enabled: true } - - length: { esql.features: 15 } + - length: { esql.features: 16 } - set: {esql.features.dissect: dissect_counter} - set: {esql.features.drop: drop_counter} - set: {esql.features.eval: eval_counter} @@ -32,6 +32,7 @@ setup: - set: {esql.features.grok: grok_counter} - set: {esql.features.keep: keep_counter} - set: {esql.features.limit: limit_counter} + - set: {esql.features.meta: meta_counter} - set: {esql.features.mv_expand: mv_expand_counter} - set: {esql.features.rename: rename_counter} - set: {esql.features.row: row_counter} From c0476c1efbf6500f03be7e4ae1feca8a0fd4e026 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 19 Mar 2024 23:48:36 +1100 Subject: [PATCH 015/214] Trivial typo fix for #105774 (#106471) As the title says. --- .../troubleshooting/troubleshooting-unbalanced-cluster.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc b/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc index ca06ec066c8a8..a1d4f5df9c4f6 100644 --- a/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc +++ b/docs/reference/troubleshooting/troubleshooting-unbalanced-cluster.asciidoc @@ -46,7 +46,7 @@ Some operations such as node restarting, decommissioning, or changing cluster al are disruptive and might require multiple shards to move in order to rebalance the cluster. 
Shard movement order is not deterministic and mostly determined by the source and target node readiness to move a shard. -While rebalancing is in progress some nodes might appear busier then others. +While rebalancing is in progress some nodes might appear busier than others. When a shard is allocated to an undesired node it uses the resources of the current node instead of the target. This might cause a hotspot (disk or CPU) when multiple shards reside on the current node that have not been From 4e2f56b9b65065ece732d7415fb4f5a359d71cdb Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 19 Mar 2024 13:55:11 +0100 Subject: [PATCH 016/214] Inject authz denial messages (#106358) This PR moves authorization denied message creation from static methods to an injectable class. Relates: ES-7984 --- .../security/src/main/java/module-info.java | 1 + .../xpack/security/Security.java | 9 +- .../authz/AuthorizationDenialMessages.java | 294 ++++++++++-------- .../security/authz/AuthorizationService.java | 11 +- .../AuthorizationDenialMessagesTests.java | 39 ++- .../authz/AuthorizationServiceTests.java | 19 +- 6 files changed, 216 insertions(+), 157 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/module-info.java b/x-pack/plugin/security/src/main/java/module-info.java index 9806650f99094..cd1eb8a650149 100644 --- a/x-pack/plugin/security/src/main/java/module-info.java +++ b/x-pack/plugin/security/src/main/java/module-info.java @@ -65,6 +65,7 @@ exports org.elasticsearch.xpack.security.action.user to org.elasticsearch.server; exports org.elasticsearch.xpack.security.action.settings to org.elasticsearch.server; exports org.elasticsearch.xpack.security.operator to org.elasticsearch.internal.operator, org.elasticsearch.internal.security; + exports org.elasticsearch.xpack.security.authz to org.elasticsearch.internal.security; exports org.elasticsearch.xpack.security.authc to org.elasticsearch.xcontent; exports org.elasticsearch.xpack.security.slowlog to 
org.elasticsearch.server; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 3d27e9ee06ddb..ae6df838b4eac 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -290,6 +290,7 @@ import org.elasticsearch.xpack.security.authc.service.ServiceAccountService; import org.elasticsearch.xpack.security.authc.support.SecondaryAuthenticator; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.authz.AuthorizationDenialMessages; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.DlsFlsRequestCacheDifferentiator; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; @@ -581,6 +582,7 @@ public class Security extends Plugin private final SetOnce realms = new SetOnce<>(); private final SetOnce client = new SetOnce<>(); private final SetOnce> reloadableComponents = new SetOnce<>(); + private final SetOnce authorizationDenialMessages = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -1007,6 +1009,9 @@ Collection createComponents( } requestInterceptors = Collections.unmodifiableSet(requestInterceptors); + if (authorizationDenialMessages.get() == null) { + authorizationDenialMessages.set(new AuthorizationDenialMessages.Default()); + } final AuthorizationService authzService = new AuthorizationService( settings, allRolesStore, @@ -1021,7 +1026,8 @@ Collection createComponents( getLicenseState(), expressionResolver, operatorPrivilegesService.get(), - restrictedIndices + restrictedIndices, + authorizationDenialMessages.get() ); components.add(nativeRolesStore); // used by roles actions @@ -2098,6 +2104,7 @@ 
public void loadExtensions(ExtensionLoader loader) { loadSingletonExtensionAndSetOnce(loader, bulkUpdateApiKeyRequestTranslator, BulkUpdateApiKeyRequestTranslator.class); loadSingletonExtensionAndSetOnce(loader, createApiKeyRequestBuilderFactory, CreateApiKeyRequestBuilderFactory.class); loadSingletonExtensionAndSetOnce(loader, hasPrivilegesRequestBuilderFactory, HasPrivilegesRequestBuilderFactory.class); + loadSingletonExtensionAndSetOnce(loader, authorizationDenialMessages, AuthorizationDenialMessages.class); } private void loadSingletonExtensionAndSetOnce(ExtensionLoader loader, SetOnce setOnce, Class clazz) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java index 9d66318a1f0fb..ae3a09af4751d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessages.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.Subject; -import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; @@ -29,158 +29,202 @@ import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.security.authz.AuthorizationService.isIndexAction; -class AuthorizationDenialMessages { - - private AuthorizationDenialMessages() {} - - static String 
runAsDenied(Authentication authentication, @Nullable AuthorizationInfo authorizationInfo, String action) { - assert authentication.isRunAs() : "constructing run as denied message but authentication for action was not run as"; - - String userText = authenticatedUserDescription(authentication); - String actionIsUnauthorizedMessage = actionIsUnauthorizedMessage(action, userText); - - String unauthorizedToRunAsMessage = "because " - + userText - + " is unauthorized to run as [" - + authentication.getEffectiveSubject().getUser().principal() - + "]"; - - return actionIsUnauthorizedMessage - + rolesDescription(authentication.getAuthenticatingSubject(), authorizationInfo.getAuthenticatedUserAuthorizationInfo()) - + ", " - + unauthorizedToRunAsMessage; - } - - static String actionDenied( +public interface AuthorizationDenialMessages { + String actionDenied( Authentication authentication, - @Nullable AuthorizationInfo authorizationInfo, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, String action, TransportRequest request, @Nullable String context - ) { - String userText = successfulAuthenticationDescription(authentication, authorizationInfo); - String remoteClusterText = authentication.isCrossClusterAccess() ? 
remoteClusterText(null) : ""; - String message = actionIsUnauthorizedMessage(action, remoteClusterText, userText); - if (context != null) { - message = message + " " + context; + ); + + String runAsDenied(Authentication authentication, @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, String action); + + String remoteActionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + String clusterAlias + ); + + class Default implements AuthorizationDenialMessages { + public Default() {} + + @Override + public String runAsDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action + ) { + assert authentication.isRunAs() : "constructing run as denied message but authentication for action was not run as"; + + String userText = authenticatedUserDescription(authentication); + String actionIsUnauthorizedMessage = actionIsUnauthorizedMessage(action, userText); + + String unauthorizedToRunAsMessage = "because " + + userText + + " is unauthorized to run as [" + + authentication.getEffectiveSubject().getUser().principal() + + "]"; + + return actionIsUnauthorizedMessage + + rolesDescription(authentication.getAuthenticatingSubject(), authorizationInfo.getAuthenticatedUserAuthorizationInfo()) + + ", " + + unauthorizedToRunAsMessage; } - if (ClusterPrivilegeResolver.isClusterAction(action)) { - final Collection privileges = ClusterPrivilegeResolver.findPrivilegesThatGrant(action, request, authentication); - if (privileges != null && privileges.size() > 0) { - message = message - + ", this action is granted by the cluster privileges [" - + collectionToCommaDelimitedString(privileges) - + "]"; + @Override + public String actionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + TransportRequest request, + @Nullable String context + ) { + String userText = 
successfulAuthenticationDescription(authentication, authorizationInfo); + String remoteClusterText = authentication.isCrossClusterAccess() ? remoteClusterText(null) : ""; + String message = actionIsUnauthorizedMessage(action, remoteClusterText, userText); + if (context != null) { + message = message + " " + context; } - } else if (isIndexAction(action)) { - final Collection privileges = IndexPrivilege.findPrivilegesThatGrant(action); - if (privileges != null && privileges.size() > 0) { - message = message - + ", this action is granted by the index privileges [" - + collectionToCommaDelimitedString(privileges) - + "]"; + + if (ClusterPrivilegeResolver.isClusterAction(action)) { + final Collection privileges = findClusterPrivilegesThatGrant(authentication, action, request); + if (privileges != null && privileges.size() > 0) { + message = message + + ", this action is granted by the cluster privileges [" + + collectionToCommaDelimitedString(privileges) + + "]"; + } + } else if (isIndexAction(action)) { + final Collection privileges = findIndexPrivilegesThatGrant(action); + if (privileges != null && privileges.size() > 0) { + message = message + + ", this action is granted by the index privileges [" + + collectionToCommaDelimitedString(privileges) + + "]"; + } } + + return message; } - return message; - } + @Override + public String remoteActionDenied( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo, + String action, + String clusterAlias + ) { + assert isIndexAction(action); + String userText = successfulAuthenticationDescription(authentication, authorizationInfo); + String remoteClusterText = remoteClusterText(clusterAlias); + return actionIsUnauthorizedMessage(action, remoteClusterText, userText) + + " because no remote indices privileges apply for the target cluster"; + } - static String remoteActionDenied( - Authentication authentication, - @Nullable AuthorizationInfo authorizationInfo, - String action, - String 
clusterAlias - ) { - assert isIndexAction(action); - String userText = successfulAuthenticationDescription(authentication, authorizationInfo); - String remoteClusterText = remoteClusterText(clusterAlias); - return actionIsUnauthorizedMessage(action, remoteClusterText, userText) - + " because no remote indices privileges apply for the target cluster"; - } + protected Collection findClusterPrivilegesThatGrant( + Authentication authentication, + String action, + TransportRequest request + ) { + return ClusterPrivilegeResolver.findPrivilegesThatGrant(action, request, authentication); + } - private static String remoteClusterText(@Nullable String clusterAlias) { - return Strings.format("towards remote cluster%s ", clusterAlias == null ? "" : " [" + clusterAlias + "]"); - } + protected Collection findIndexPrivilegesThatGrant(String action) { + return IndexPrivilege.findPrivilegesThatGrant(action); + } + + private String remoteClusterText(@Nullable String clusterAlias) { + return Strings.format("towards remote cluster%s ", clusterAlias == null ? "" : " [" + clusterAlias + "]"); + } - private static String authenticatedUserDescription(Authentication authentication) { - String userText = (authentication.isServiceAccount() ? 
"service account" : "user") - + " [" - + authentication.getAuthenticatingSubject().getUser().principal() - + "]"; - if (authentication.isAuthenticatedAsApiKey() || authentication.isCrossClusterAccess()) { - final String apiKeyId = (String) authentication.getAuthenticatingSubject() - .getMetadata() - .get(AuthenticationField.API_KEY_ID_KEY); - assert apiKeyId != null : "api key id must be present in the metadata"; - userText = "API key id [" + apiKeyId + "] of " + userText; - if (authentication.isCrossClusterAccess()) { - final Authentication crossClusterAccessAuthentication = (Authentication) authentication.getAuthenticatingSubject() + private String authenticatedUserDescription(Authentication authentication) { + String userText = (authentication.isServiceAccount() ? "service account" : "user") + + " [" + + authentication.getAuthenticatingSubject().getUser().principal() + + "]"; + if (authentication.isAuthenticatedAsApiKey() || authentication.isCrossClusterAccess()) { + final String apiKeyId = (String) authentication.getAuthenticatingSubject() .getMetadata() - .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); - assert crossClusterAccessAuthentication != null : "cross cluster access authentication must be present in the metadata"; - userText = successfulAuthenticationDescription(crossClusterAccessAuthentication, null) + " authenticated by " + userText; + .get(AuthenticationField.API_KEY_ID_KEY); + assert apiKeyId != null : "api key id must be present in the metadata"; + userText = "API key id [" + apiKeyId + "] of " + userText; + if (authentication.isCrossClusterAccess()) { + final Authentication crossClusterAccessAuthentication = (Authentication) authentication.getAuthenticatingSubject() + .getMetadata() + .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); + assert crossClusterAccessAuthentication != null : "cross cluster access authentication must be present in the metadata"; + userText = 
successfulAuthenticationDescription(crossClusterAccessAuthentication, null) + + " authenticated by " + + userText; + } } + return userText; } - return userText; - } - static String rolesDescription(Subject subject, @Nullable AuthorizationInfo authorizationInfo) { - // We cannot print the roles if it's an API key or a service account (both do not have roles, but privileges) - if (subject.getType() != Subject.Type.USER) { - return ""; - } + // package-private for tests + String rolesDescription(Subject subject, @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo) { + // We cannot print the roles if it's an API key or a service account (both do not have roles, but privileges) + if (subject.getType() != Subject.Type.USER) { + return ""; + } - final StringBuilder sb = new StringBuilder(); - final List effectiveRoleNames = extractEffectiveRoleNames(authorizationInfo); - if (effectiveRoleNames == null) { - sb.append(" with assigned roles [").append(Strings.arrayToCommaDelimitedString(subject.getUser().roles())).append("]"); - } else { - sb.append(" with effective roles [").append(Strings.collectionToCommaDelimitedString(effectiveRoleNames)).append("]"); - - final Set assignedRoleNames = Set.of(subject.getUser().roles()); - final SortedSet unfoundedRoleNames = Sets.sortedDifference(assignedRoleNames, Set.copyOf(effectiveRoleNames)); - if (false == unfoundedRoleNames.isEmpty()) { - sb.append(" (assigned roles [") - .append(Strings.collectionToCommaDelimitedString(unfoundedRoleNames)) - .append("] were not found)"); + final StringBuilder sb = new StringBuilder(); + final List effectiveRoleNames = extractEffectiveRoleNames(authorizationInfo); + if (effectiveRoleNames == null) { + sb.append(" with assigned roles [").append(Strings.arrayToCommaDelimitedString(subject.getUser().roles())).append("]"); + } else { + sb.append(" with effective roles [").append(Strings.collectionToCommaDelimitedString(effectiveRoleNames)).append("]"); + + final Set assignedRoleNames = 
Set.of(subject.getUser().roles()); + final SortedSet unfoundedRoleNames = Sets.sortedDifference(assignedRoleNames, Set.copyOf(effectiveRoleNames)); + if (false == unfoundedRoleNames.isEmpty()) { + sb.append(" (assigned roles [") + .append(Strings.collectionToCommaDelimitedString(unfoundedRoleNames)) + .append("] were not found)"); + } } + return sb.toString(); } - return sb.toString(); - } - static String successfulAuthenticationDescription(Authentication authentication, @Nullable AuthorizationInfo authorizationInfo) { - String userText = authenticatedUserDescription(authentication); + // package-private for tests + String successfulAuthenticationDescription( + Authentication authentication, + @Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo + ) { + String userText = authenticatedUserDescription(authentication); - if (authentication.isRunAs()) { - userText = userText + " run as [" + authentication.getEffectiveSubject().getUser().principal() + "]"; + if (authentication.isRunAs()) { + userText = userText + " run as [" + authentication.getEffectiveSubject().getUser().principal() + "]"; + } + + userText += rolesDescription(authentication.getEffectiveSubject(), authorizationInfo); + return userText; } - userText += rolesDescription(authentication.getEffectiveSubject(), authorizationInfo); - return userText; - } + private List extractEffectiveRoleNames(@Nullable AuthorizationEngine.AuthorizationInfo authorizationInfo) { + if (authorizationInfo == null) { + return null; + } - private static List extractEffectiveRoleNames(@Nullable AuthorizationInfo authorizationInfo) { - if (authorizationInfo == null) { - return null; + final Map info = authorizationInfo.asMap(); + final Object roleNames = info.get(PRINCIPAL_ROLES_FIELD_NAME); + // AuthorizationInfo from custom authorization engine may not have this field or have it as a different data type + if (false == roleNames instanceof String[]) { + assert false == authorizationInfo instanceof 
RBACEngine.RBACAuthorizationInfo + : "unexpected user.roles field [" + roleNames + "] for RBACAuthorizationInfo"; + return null; + } + return Arrays.stream((String[]) roleNames).sorted().toList(); } - final Map info = authorizationInfo.asMap(); - final Object roleNames = info.get(PRINCIPAL_ROLES_FIELD_NAME); - // AuthorizationInfo from custom authorization engine may not have this field or have it as a different data type - if (false == roleNames instanceof String[]) { - assert false == authorizationInfo instanceof RBACEngine.RBACAuthorizationInfo - : "unexpected user.roles field [" + roleNames + "] for RBACAuthorizationInfo"; - return null; + private String actionIsUnauthorizedMessage(String action, String userText) { + return actionIsUnauthorizedMessage(action, "", userText); } - return Arrays.stream((String[]) roleNames).sorted().toList(); - } - - private static String actionIsUnauthorizedMessage(String action, String userText) { - return actionIsUnauthorizedMessage(action, "", userText); - } - private static String actionIsUnauthorizedMessage(String action, String remoteClusterText, String userText) { - return "action [" + action + "] " + remoteClusterText + "is unauthorized for " + userText; + private String actionIsUnauthorizedMessage(String action, String remoteClusterText, String userText) { + return "action [" + action + "] " + remoteClusterText + "is unauthorized for " + userText; + } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index c886f2fde55ab..0c28ea1e37354 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -139,6 +139,7 @@ public class AuthorizationService { private final boolean isAnonymousEnabled; 
private final boolean anonymousAuthzExceptionEnabled; + private final AuthorizationDenialMessages authorizationDenialMessages; public AuthorizationService( Settings settings, @@ -154,7 +155,8 @@ public AuthorizationService( XPackLicenseState licenseState, IndexNameExpressionResolver resolver, OperatorPrivilegesService operatorPrivilegesService, - RestrictedIndices restrictedIndices + RestrictedIndices restrictedIndices, + AuthorizationDenialMessages authorizationDenialMessages ) { this.clusterService = clusterService; this.auditTrailService = auditTrailService; @@ -178,6 +180,7 @@ public AuthorizationService( this.licenseState = licenseState; this.operatorPrivilegesService = operatorPrivilegesService; this.indicesAccessControlWrapper = new DlsFlsFeatureTrackingIndicesAccessControlWrapper(settings, licenseState); + this.authorizationDenialMessages = authorizationDenialMessages; } public void checkPrivileges( @@ -922,7 +925,7 @@ private ElasticsearchSecurityException runAsDenied( return denialException( authentication, action, - () -> AuthorizationDenialMessages.runAsDenied(authentication, authorizationInfo, action), + () -> authorizationDenialMessages.runAsDenied(authentication, authorizationInfo, action), null ); } @@ -932,7 +935,7 @@ public ElasticsearchSecurityException remoteActionDenied(Authentication authenti return denialException( authentication, action, - () -> AuthorizationDenialMessages.remoteActionDenied(authentication, authorizationInfo, action, clusterAlias), + () -> authorizationDenialMessages.remoteActionDenied(authentication, authorizationInfo, action, clusterAlias), null ); } @@ -967,7 +970,7 @@ private ElasticsearchSecurityException actionDenied( return denialException( authentication, action, - () -> AuthorizationDenialMessages.actionDenied(authentication, authorizationInfo, action, request, context), + () -> authorizationDenialMessages.actionDenied(authentication, authorizationInfo, action, request, context), cause ); } diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java index 73bd70705120c..f3b2d65ad1b0c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationDenialMessagesTests.java @@ -29,16 +29,15 @@ public class AuthorizationDenialMessagesTests extends ESTestCase { + private final AuthorizationDenialMessages.Default denialMessages = new AuthorizationDenialMessages.Default(); + public void testNoRolesDescriptionIfSubjectIsNotAUser() { final Authentication authentication = randomFrom( AuthenticationTestHelper.builder().apiKey().build(), AuthenticationTestHelper.builder().serviceAccount().build() ); - assertThat( - AuthorizationDenialMessages.rolesDescription(authentication.getEffectiveSubject(), mock(AuthorizationInfo.class)), - equalTo("") - ); + assertThat(denialMessages.rolesDescription(authentication.getEffectiveSubject(), mock(AuthorizationInfo.class)), equalTo("")); } public void testRolesDescriptionWithNullAuthorizationInfo() { @@ -51,7 +50,7 @@ public void testRolesDescriptionWithNullAuthorizationInfo() { .user(new User(randomAlphaOfLengthBetween(3, 8), assignedRoleNames.toArray(String[]::new))) .build(false) .getEffectiveSubject(); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, null); + final String rolesDescription = denialMessages.rolesDescription(subject, null); assertThat( rolesDescription, @@ -71,7 +70,7 @@ public void testRolesDescriptionWithNullRolesField() { .getEffectiveSubject(); final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of()); - final String rolesDescription = 
AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -102,7 +101,7 @@ public void testRolesDescriptionWithIncompatibleRolesField() { ) ) ); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -123,7 +122,7 @@ public void testRoleDescriptionWithEmptyResolvedRole() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", Strings.EMPTY_ARRAY)); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); if (assignedRoleNames.isEmpty()) { assertThat(rolesDescription, equalTo(" with effective roles []")); @@ -160,7 +159,7 @@ public void testRoleDescriptionAllResolvedAndMaybeWithAnonymousRoles() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", effectiveRoleNames.toArray(String[]::new))); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -195,7 +194,7 @@ public void testRoleDescriptionWithUnresolvedRoles() { final AuthorizationInfo authorizationInfo = mock(AuthorizationInfo.class); when(authorizationInfo.asMap()).thenReturn(Map.of("user.roles", effectiveRoleNames.toArray(String[]::new))); - final String rolesDescription = AuthorizationDenialMessages.rolesDescription(subject, authorizationInfo); + final String rolesDescription = 
denialMessages.rolesDescription(subject, authorizationInfo); assertThat( rolesDescription, @@ -220,13 +219,13 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); final String action = "indices:/some/action/" + randomAlphaOfLengthBetween(0, 8); assertThat( - AuthorizationDenialMessages.actionDenied(authentication, null, action, mock(), null), + denialMessages.actionDenied(authentication, null, action, mock(), null), equalTo( Strings.format( "action [%s] towards remote cluster is unauthorized for %s authenticated by API key id [%s] of user [%s], " + "this action is granted by the index privileges [all]", action, - AuthorizationDenialMessages.successfulAuthenticationDescription(innerAuthentication, null), + denialMessages.successfulAuthenticationDescription(innerAuthentication, null), authentication.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY), authentication.getEffectiveSubject().getUser().principal() ) @@ -237,7 +236,7 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { public void testSuccessfulAuthenticationDescription() { final Authentication authentication1 = AuthenticationTestHelper.builder().realm().build(false); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication1, null), + denialMessages.successfulAuthenticationDescription(authentication1, null), equalTo( Strings.format( "user [%s] with assigned roles [%s]", @@ -249,7 +248,7 @@ public void testSuccessfulAuthenticationDescription() { final Authentication authentication2 = AuthenticationTestHelper.builder().realm().runAs().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication2, null), + denialMessages.successfulAuthenticationDescription(authentication2, null), equalTo( Strings.format( "user [%s] run as [%s] with assigned roles [%s]", @@ -262,7 +261,7 @@ public void 
testSuccessfulAuthenticationDescription() { final Authentication authentication3 = AuthenticationTestHelper.builder().apiKey().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication3, null), + denialMessages.successfulAuthenticationDescription(authentication3, null), equalTo( Strings.format( "API key id [%s] of user [%s]", @@ -274,7 +273,7 @@ public void testSuccessfulAuthenticationDescription() { final Authentication authentication4 = AuthenticationTestHelper.builder().serviceAccount().build(); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication4, null), + denialMessages.successfulAuthenticationDescription(authentication4, null), equalTo(Strings.format("service account [%s]", authentication4.getEffectiveSubject().getUser().principal())) ); @@ -286,11 +285,11 @@ public void testSuccessfulAuthenticationDescription() { .getMetadata() .get(AuthenticationField.CROSS_CLUSTER_ACCESS_AUTHENTICATION_KEY); assertThat( - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication5, null), + denialMessages.successfulAuthenticationDescription(authentication5, null), equalTo( Strings.format( "%s authenticated by API key id [%s] of user [%s]", - AuthorizationDenialMessages.successfulAuthenticationDescription(innerAuthentication, null), + denialMessages.successfulAuthenticationDescription(innerAuthentication, null), authentication5.getAuthenticatingSubject().getMetadata().get(AuthenticationField.API_KEY_ID_KEY), authentication5.getEffectiveSubject().getUser().principal() ) @@ -303,14 +302,14 @@ public void testRemoteActionDenied() { final String action = "indices:/some/action/" + randomAlphaOfLengthBetween(0, 8); final String clusterAlias = randomAlphaOfLengthBetween(5, 12); assertThat( - AuthorizationDenialMessages.remoteActionDenied(authentication, null, action, clusterAlias), + denialMessages.remoteActionDenied(authentication, null, action, clusterAlias), equalTo( 
Strings.format( "action [%s] towards remote cluster [%s] is unauthorized for %s" + " because no remote indices privileges apply for the target cluster", action, clusterAlias, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, null) + denialMessages.successfulAuthenticationDescription(authentication, null) ) ) ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 8a0041ef2bb76..2cc6c7d569f44 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -321,7 +321,8 @@ public void setup() { licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); } @@ -1743,7 +1744,8 @@ public void testDenialForAnonymousUser() { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); RoleDescriptor role = new RoleDescriptor( @@ -1791,7 +1793,8 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() { new XPackLicenseState(() -> 0), TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); RoleDescriptor role = new RoleDescriptor( @@ -3307,7 +3310,8 @@ public void testAuthorizationEngineSelectionForCheckPrivileges() throws Exceptio licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); Subject subject = 
new Subject(new User("test", "a role"), mock(RealmRef.class)); @@ -3462,7 +3466,8 @@ public void getUserPrivileges(AuthorizationInfo authorizationInfo, ActionListene licenseState, TestIndexNameExpressionResolver.newInstance(), operatorPrivilegesService, - RESTRICTED_INDICES + RESTRICTED_INDICES, + new AuthorizationDenialMessages.Default() ); Authentication authentication; try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { @@ -3561,7 +3566,7 @@ public void testRemoteActionDenied() { + " because no remote indices privileges apply for the target cluster", action, clusterAlias, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, authorizationInfo) + new AuthorizationDenialMessages.Default().successfulAuthenticationDescription(authentication, authorizationInfo) ) ) ); @@ -3583,7 +3588,7 @@ public void testActionDeniedForCrossClusterAccessAuthentication() { Strings.format( "action [%s] towards remote cluster is unauthorized for %s", action, - AuthorizationDenialMessages.successfulAuthenticationDescription(authentication, authorizationInfo) + new AuthorizationDenialMessages.Default().successfulAuthenticationDescription(authentication, authorizationInfo) ) ) ); From 628689f0eb0967c137d97af3bd5c3a3fd6508cf4 Mon Sep 17 00:00:00 2001 From: Tom Veasey Date: Tue, 19 Mar 2024 13:14:14 +0000 Subject: [PATCH 017/214] [ML] Fix the position of spike, dip and distribution changes bucket when the sibling aggregation includes empty buckets (#106472) There was a bug in our indexing of the spike, dip and distribution change bucket. Specifically, we were not mapping the index in the values array, which skips empty buckets, back to the aggregation bucket index. The effect was that the reported buckets were offset to the left when the spike, dip or distribution change occurred after one or more empty buckets. 
--- docs/changelog/106472.yaml | 6 ++++ .../changepoint/ChangePointAggregator.java | 10 +++---- .../aggs/changepoint/SpikeAndDipDetector.java | 18 ++++++------ .../changepoint/SpikeAndDipDetectorTests.java | 28 +++++++++++++++++-- 4 files changed, 46 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/106472.yaml diff --git a/docs/changelog/106472.yaml b/docs/changelog/106472.yaml new file mode 100644 index 0000000000000..120286c4cd8c7 --- /dev/null +++ b/docs/changelog/106472.yaml @@ -0,0 +1,6 @@ +pr: 106472 +summary: "Fix the position of spike, dip and distribution changes bucket when the\ + \ sibling aggregation includes empty buckets" +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java index 650c02af00837..faef29ff65070 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregator.java @@ -93,13 +93,13 @@ public ChangePointAggregator(String name, String bucketsPath, Map maybeBucketsValue = extractDoubleBucketedValues( + Optional maybeBucketValues = extractDoubleBucketedValues( bucketsPaths()[0], aggregations, BucketHelpers.GapPolicy.SKIP, true ); - if (maybeBucketsValue.isEmpty()) { + if (maybeBucketValues.isEmpty()) { return new InternalChangePointAggregation( name(), metadata(), @@ -107,7 +107,7 @@ public InternalAggregation doReduce(InternalAggregations aggregations, Aggregati new ChangeType.Indeterminable("unable to find valid bucket values in bucket path [" + bucketsPaths()[0] + "]") ); } - MlAggsHelper.DoubleBucketValues bucketValues = maybeBucketsValue.get(); + MlAggsHelper.DoubleBucketValues bucketValues = maybeBucketValues.get(); if (bucketValues.getValues().length < (2 * 
MINIMUM_BUCKETS) + 2) { return new InternalChangePointAggregation( name(), @@ -146,7 +146,7 @@ public InternalAggregation doReduce(InternalAggregations aggregations, Aggregati static ChangeType testForSpikeOrDip(MlAggsHelper.DoubleBucketValues bucketValues, double pValueThreshold) { try { SpikeAndDipDetector detect = new SpikeAndDipDetector(bucketValues.getValues()); - ChangeType result = detect.at(pValueThreshold); + ChangeType result = detect.at(pValueThreshold, bucketValues); logger.trace("spike or dip p-value: [{}]", result.pValue()); return result; } catch (NotStrictlyPositiveException nspe) { @@ -552,7 +552,7 @@ ChangeType changeType(MlAggsHelper.DoubleBucketValues bucketValues, double slope case TREND_CHANGE: return new ChangeType.TrendChange(pValueVsStationary(), rSquared(), bucketValues.getBucketIndex(changePoint)); case DISTRIBUTION_CHANGE: - return new ChangeType.DistributionChange(pValue, changePoint); + return new ChangeType.DistributionChange(pValue, bucketValues.getBucketIndex(changePoint)); } throw new RuntimeException("Unknown change type [" + type + "]."); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java index 8bbd793637ab3..b628ea3324cf1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetector.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ml.aggs.changepoint; +import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; + import java.util.Arrays; /** @@ -133,29 +135,29 @@ private double[] removeIf(ExcludedPredicate should, double[] values) { spikeTestKDE = new KDE(spikeKDEValues, 1.36); } - ChangeType at(double pValueThreshold) { + ChangeType at(double pValueThreshold, MlAggsHelper.DoubleBucketValues bucketValues) { if (dipIndex 
== -1 || spikeIndex == -1) { return new ChangeType.Indeterminable( "not enough buckets to check for dip or spike. Requires at least [3]; found [" + numValues + "]" ); } - KDE.ValueAndMagnitude dipLeftLeftTailTest = dipTestKDE.cdf(dipValue); + KDE.ValueAndMagnitude dipLeftTailTest = dipTestKDE.cdf(dipValue); KDE.ValueAndMagnitude spikeRightTailTest = spikeTestKDE.sf(spikeValue); - double dipPValue = dipLeftLeftTailTest.pValue(numValues); + double dipPValue = dipLeftTailTest.pValue(numValues); double spikePValue = spikeRightTailTest.pValue(numValues); if (dipPValue < pValueThreshold && spikePValue < pValueThreshold) { - if (dipLeftLeftTailTest.isMoreSignificant(spikeRightTailTest)) { - return new ChangeType.Dip(dipPValue, dipIndex); + if (dipLeftTailTest.isMoreSignificant(spikeRightTailTest)) { + return new ChangeType.Dip(dipPValue, bucketValues.getBucketIndex(dipIndex)); } - return new ChangeType.Spike(spikePValue, spikeIndex); + return new ChangeType.Spike(spikePValue, bucketValues.getBucketIndex(spikeIndex)); } if (dipPValue < pValueThreshold) { - return new ChangeType.Dip(dipPValue, dipIndex); + return new ChangeType.Dip(dipPValue, bucketValues.getBucketIndex(dipIndex)); } if (spikePValue < pValueThreshold) { - return new ChangeType.Spike(spikePValue, spikeIndex); + return new ChangeType.Spike(spikePValue, bucketValues.getBucketIndex(spikeIndex)); } return new ChangeType.Stationary(); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java index 5653af2a000f5..fe91aa3e6a600 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/SpikeAndDipDetectorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.aggs.changepoint; import 
org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.aggs.MlAggsHelper; import java.util.Arrays; @@ -19,10 +20,13 @@ public class SpikeAndDipDetectorTests extends ESTestCase { public void testTooLittleData() { for (int i = 0; i < 4; i++) { + long[] docCounts = new long[i]; double[] values = new double[i]; + Arrays.fill(docCounts, 1); Arrays.fill(values, 1.0); + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - assertThat(detect.at(0.01), instanceOf(ChangeType.Indeterminable.class)); + assertThat(detect.at(0.01, bucketValues), instanceOf(ChangeType.Indeterminable.class)); } } @@ -142,24 +146,42 @@ public void testDetection() { // Check vs some expected values. { + long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; double[] values = new double[] { 0.1, 3.1, 1.2, 1.7, 0.9, 2.3, -0.8, 3.2, 1.2, 1.3, 1.1, 1.0, 8.5, 0.5, 2.6, 0.7 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - ChangeType change = detect.at(0.05); + ChangeType change = detect.at(0.05, bucketValues); assertThat(change, instanceOf(ChangeType.Spike.class)); assertThat(change.pValue(), closeTo(3.0465e-12, 1e-15)); } { + long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; double[] values = new double[] { 0.1, 3.1, 1.2, 1.7, 0.9, 2.3, -4.2, 3.2, 1.2, 1.3, 1.1, 1.0, 3.5, 0.5, 2.6, 0.7 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values); SpikeAndDipDetector detect = new SpikeAndDipDetector(values); - ChangeType change = detect.at(0.05); + ChangeType change = detect.at(0.05, bucketValues); assertThat(change, instanceOf(ChangeType.Dip.class)); assertThat(change.pValue(), closeTo(1.2589e-08, 1e-11)); } } + + public void testMissingBuckets() { + 
long[] docCounts = new long[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 }; + double[] values = new double[] { 1.0, 2.0, 0.7, 1.0, 1.5, 1.1, 2.2, 10.0, 0.3, 0.4, 0.7, 0.9, 1.4, 2.1, 1.2, 1.0 }; + int[] buckets = new int[] { 0, 2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 15, 17, 18, 19, 20 }; + MlAggsHelper.DoubleBucketValues bucketValues = new MlAggsHelper.DoubleBucketValues(docCounts, values, buckets); + + SpikeAndDipDetector detect = new SpikeAndDipDetector(values); + + ChangeType change = detect.at(0.01, bucketValues); + + assertThat(change, instanceOf(ChangeType.Spike.class)); + assertThat(change.changePoint(), equalTo(10)); + } } From c1d0e8ed2a8d977ee70faddce69d571c79c693c8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Mar 2024 09:20:11 -0400 Subject: [PATCH 018/214] ESQL: Drop a parameter from `BlockHash` (#106417) This drops the `DriverContext` from `BlockHash` - we already have it in `BlockFactory`. --- .../compute/operator/AggregatorBenchmark.java | 2 +- .../aggregation/blockhash/BlockHash.java | 33 +++++++++---------- .../blockhash/BooleanBlockHash.java | 6 ++-- .../blockhash/BytesRefBlockHash.java | 8 ++--- .../blockhash/BytesRefLongBlockHash.java | 10 +++--- .../blockhash/DoubleBlockHash.java | 8 ++--- .../aggregation/blockhash/IntBlockHash.java | 8 ++--- .../aggregation/blockhash/LongBlockHash.java | 8 ++--- .../blockhash/LongLongBlockHash.java | 7 ++-- .../aggregation/blockhash/NullBlockHash.java | 6 ++-- .../blockhash/PackedValuesBlockHash.java | 8 ++--- .../operator/HashAggregationOperator.java | 2 +- .../operator/OrdinalsGroupingOperator.java | 7 +++- .../elasticsearch/compute/OperatorTests.java | 2 +- .../blockhash/BlockHashRandomizedTests.java | 6 ++-- .../aggregation/blockhash/BlockHashTests.java | 6 ++-- .../TestPhysicalOperationProviders.java | 2 +- 17 files changed, 63 insertions(+), 66 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java index 63686023498c9..d3aef8746b068 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ -147,7 +147,7 @@ private static Operator operator(DriverContext driverContext, String grouping, S }; return new HashAggregationOperator( List.of(supplier(op, dataType, groups.size()).groupingAggregatorFactory(AggregatorMode.SINGLE)), - () -> BlockHash.build(groups, driverContext, 16 * 1024, false), + () -> BlockHash.build(groups, driverContext.blockFactory(), 16 * 1024, false), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index dd760408b3be5..93cd3a6b9326a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -19,7 +19,6 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.core.Releasable; @@ -36,12 +35,10 @@ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // permits BooleanBlockHash, BytesRefBlockHash, DoubleBlockHash, IntBlockHash, LongBlockHash,// NullBlockHash, PackedValuesBlockHash, BytesRefLongBlockHash, LongLongBlockHash { - protected final BigArrays bigArrays; protected final BlockFactory blockFactory; - BlockHash(DriverContext driverContext) { - bigArrays = driverContext.bigArrays(); - 
blockFactory = driverContext.blockFactory(); + BlockHash(BlockFactory blockFactory) { + this.blockFactory = blockFactory; } /** @@ -79,40 +76,40 @@ public abstract sealed class BlockHash implements Releasable, SeenGroupIds // */ public static BlockHash build( List groups, - DriverContext driverContext, + BlockFactory blockFactory, int emitBatchSize, boolean allowBrokenOptimizations ) { if (groups.size() == 1) { - return newForElementType(groups.get(0).channel(), groups.get(0).elementType(), driverContext); + return newForElementType(groups.get(0).channel(), groups.get(0).elementType(), blockFactory); } if (allowBrokenOptimizations && groups.size() == 2) { var g1 = groups.get(0); var g2 = groups.get(1); if (g1.elementType() == ElementType.LONG && g2.elementType() == ElementType.LONG) { - return new LongLongBlockHash(driverContext, g1.channel(), g2.channel(), emitBatchSize); + return new LongLongBlockHash(blockFactory, g1.channel(), g2.channel(), emitBatchSize); } if (g1.elementType() == ElementType.BYTES_REF && g2.elementType() == ElementType.LONG) { - return new BytesRefLongBlockHash(driverContext, g1.channel(), g2.channel(), false, emitBatchSize); + return new BytesRefLongBlockHash(blockFactory, g1.channel(), g2.channel(), false, emitBatchSize); } if (g1.elementType() == ElementType.LONG && g2.elementType() == ElementType.BYTES_REF) { - return new BytesRefLongBlockHash(driverContext, g2.channel(), g1.channel(), true, emitBatchSize); + return new BytesRefLongBlockHash(blockFactory, g2.channel(), g1.channel(), true, emitBatchSize); } } - return new PackedValuesBlockHash(groups, driverContext, emitBatchSize); + return new PackedValuesBlockHash(groups, blockFactory, emitBatchSize); } /** * Creates a specialized hash table that maps a {@link Block} of the given input element type to ids. 
*/ - private static BlockHash newForElementType(int channel, ElementType type, DriverContext driverContext) { + private static BlockHash newForElementType(int channel, ElementType type, BlockFactory blockFactory) { return switch (type) { - case NULL -> new NullBlockHash(channel, driverContext); - case BOOLEAN -> new BooleanBlockHash(channel, driverContext); - case INT -> new IntBlockHash(channel, driverContext); - case LONG -> new LongBlockHash(channel, driverContext); - case DOUBLE -> new DoubleBlockHash(channel, driverContext); - case BYTES_REF -> new BytesRefBlockHash(channel, driverContext); + case NULL -> new NullBlockHash(channel, blockFactory); + case BOOLEAN -> new BooleanBlockHash(channel, blockFactory); + case INT -> new IntBlockHash(channel, blockFactory); + case LONG -> new LongBlockHash(channel, blockFactory); + case DOUBLE -> new DoubleBlockHash(channel, blockFactory); + case BYTES_REF -> new BytesRefBlockHash(channel, blockFactory); default -> throw new IllegalArgumentException("unsupported grouping element type [" + type + "]"); }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 79da105a9adaa..09ec04a1e4575 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.IntBlock; import 
org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupeBoolean; import static org.elasticsearch.compute.operator.MultivalueDedupeBoolean.FALSE_ORD; @@ -30,8 +30,8 @@ final class BooleanBlockHash extends BlockHash { private final int channel; private final boolean[] everSeen = new boolean[TRUE_ORD + 1]; - BooleanBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + BooleanBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index fb9b680c62d1d..f368852ef78fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -18,12 +18,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeBytesRef; @@ -46,10 +46,10 @@ final class BytesRefBlockHash extends BlockHash { */ private boolean seenNull; - BytesRefBlockHash(int channel, DriverContext 
driverContext) { - super(driverContext); + BytesRefBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.bytesRefHash = new BytesRefHash(1, bigArrays); + this.bytesRefHash = new BytesRefHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index 7ee8a7165aa17..d11b3f0070e14 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -16,13 +16,13 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasables; /** @@ -36,8 +36,8 @@ final class BytesRefLongBlockHash extends BlockHash { private final BytesRefHash bytesHash; private final LongLongHash finalHash; - BytesRefLongBlockHash(DriverContext driverContext, int channel1, int channel2, boolean reverseOutput, int emitBatchSize) { - super(driverContext); + BytesRefLongBlockHash(BlockFactory blockFactory, int channel1, int channel2, boolean reverseOutput, int emitBatchSize) { + super(blockFactory); this.channel1 = channel1; this.channel2 = channel2; this.reverseOutput = 
reverseOutput; @@ -47,8 +47,8 @@ final class BytesRefLongBlockHash extends BlockHash { BytesRefHash bytesHash = null; LongLongHash longHash = null; try { - bytesHash = new BytesRefHash(1, bigArrays); - longHash = new LongLongHash(1, bigArrays); + bytesHash = new BytesRefHash(1, blockFactory.bigArrays()); + longHash = new LongLongHash(1, blockFactory.bigArrays()); this.bytesHash = bytesHash; this.finalHash = longHash; success = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index c03ce2a0a4dce..fe15a21a4beb0 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -13,12 +13,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeDouble; @@ -40,10 +40,10 @@ final class DoubleBlockHash extends BlockHash { */ private boolean seenNull; - DoubleBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + DoubleBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, 
blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index bd5438da153e4..47911c61fd704 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -13,10 +13,10 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeInt; @@ -37,10 +37,10 @@ final class IntBlockHash extends BlockHash { */ private boolean seenNull; - IntBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + IntBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index d817edb9e059a..639d9cf48a515 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -13,12 +13,12 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.compute.operator.MultivalueDedupeLong; @@ -40,10 +40,10 @@ final class LongBlockHash extends BlockHash { */ private boolean seenNull; - LongBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + LongBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; - this.longHash = new LongHash(1, bigArrays); + this.longHash = new LongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index 49b16198a5d77..056c3985b8728 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -20,7 +20,6 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Releasables; @@ -33,12 +32,12 @@ final class LongLongBlockHash extends BlockHash { private final int emitBatchSize; private final LongLongHash hash; - LongLongBlockHash(DriverContext driverContext, int channel1, int channel2, int emitBatchSize) { - super(driverContext); + LongLongBlockHash(BlockFactory blockFactory, int channel1, int channel2, int emitBatchSize) { + super(blockFactory); this.channel1 = channel1; this.channel2 = channel2; this.emitBatchSize = emitBatchSize; - this.hash = new LongLongHash(1, bigArrays); + this.hash = new LongLongHash(1, blockFactory.bigArrays()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java index 0c658ade236fd..601d75d832004 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/NullBlockHash.java @@ -11,10 +11,10 @@ import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; /** * Maps a {@link BooleanBlock} column to group ids. 
Assigns group @@ -24,8 +24,8 @@ final class NullBlockHash extends BlockHash { private final int channel; private boolean seenNull = false; - NullBlockHash(int channel, DriverContext driverContext) { - super(driverContext); + NullBlockHash(int channel, BlockFactory blockFactory) { + super(blockFactory); this.channel = channel; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index b58c50b79311a..1e6a6b790bba8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -16,11 +16,11 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.BatchEncoder; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.core.Releasables; @@ -59,11 +59,11 @@ final class PackedValuesBlockHash extends BlockHash { private final BytesRefBuilder bytes = new BytesRefBuilder(); private final Group[] groups; - PackedValuesBlockHash(List specs, DriverContext driverContext, int emitBatchSize) { - super(driverContext); + PackedValuesBlockHash(List specs, BlockFactory blockFactory, int emitBatchSize) { + super(blockFactory); this.groups = specs.stream().map(Group::new).toArray(Group[]::new); 
this.emitBatchSize = emitBatchSize; - this.bytesRefHash = new BytesRefHash(1, bigArrays); + this.bytesRefHash = new BytesRefHash(1, blockFactory.bigArrays()); this.nullTrackingBytes = (groups.length + 7) / 8; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 6dcdd15fd1d1c..04b9d576fe0aa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -47,7 +47,7 @@ public record HashAggregationOperatorFactory(List groups, List BlockHash.build(groups, driverContext, maxPageSize, false), + () -> BlockHash.build(groups, driverContext.blockFactory(), maxPageSize, false), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index c3a26cedf5bbe..226a0ac534942 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -478,7 +478,12 @@ private static class ValuesAggregator implements Releasable { ); this.aggregator = new HashAggregationOperator( aggregatorFactories, - () -> BlockHash.build(List.of(new GroupSpec(channelIndex, groupingElementType)), driverContext, maxPageSize, false), + () -> BlockHash.build( + List.of(new GroupSpec(channelIndex, groupingElementType)), + driverContext.blockFactory(), + maxPageSize, + false + ), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index bce4d2b0a454a..34fb0f96b8722 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -240,7 +240,7 @@ public String toString() { List.of(CountAggregatorFunction.supplier(List.of(1, 2)).groupingAggregatorFactory(FINAL)), () -> BlockHash.build( List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), - driverContext, + driverContext.blockFactory(), randomPageSize(), false ), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 2dc527ce213d6..a874836198be0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.MockBlockFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupeTests; import org.elasticsearch.core.Releasables; @@ -202,10 +201,9 @@ private BlockHash newBlockHash(BlockFactory blockFactory, int emitBatchSize, Lis for (int c = 0; c < types.size(); c++) { specs.add(new HashAggregationOperator.GroupSpec(c, types.get(c))); } - DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); return forcePackedHash - ? 
new PackedValuesBlockHash(specs, driverContext, emitBatchSize) - : BlockHash.build(specs, driverContext, emitBatchSize, true); + ? new PackedValuesBlockHash(specs, blockFactory, emitBatchSize) + : BlockHash.build(specs, blockFactory, emitBatchSize, true); } private static class KeyComparator implements Comparator> { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 4e392ca24dada..2dc46e71360c9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -1156,11 +1155,10 @@ private void hash(Consumer callback, int emitBatchSize, Block... va for (int c = 0; c < values.length; c++) { specs.add(new HashAggregationOperator.GroupSpec(c, values[c].elementType())); } - DriverContext driverContext = new DriverContext(bigArrays, blockFactory); try ( BlockHash blockHash = forcePackedHash - ? new PackedValuesBlockHash(specs, driverContext, emitBatchSize) - : BlockHash.build(specs, driverContext, emitBatchSize, true) + ? 
new PackedValuesBlockHash(specs, blockFactory, emitBatchSize) + : BlockHash.build(specs, blockFactory, emitBatchSize, true) ) { hash(true, blockHash, callback, values); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 2f201196fbe83..043d75ea1cbca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -277,7 +277,7 @@ public Operator get(DriverContext driverContext) { aggregators, () -> BlockHash.build( List.of(new HashAggregationOperator.GroupSpec(groupByChannel, groupElementType)), - driverContext, + driverContext.blockFactory(), pageSize, false ), From 823f174c70ad4e93c8db38fe0d1cff4bb0e865da Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 19 Mar 2024 07:20:22 -0700 Subject: [PATCH 019/214] Auto remove sink handlers when completed (#106438) Rather than manually removing sink handlers upon completion, this PR registers a listener that automatically removes the handler from ExchangeService when it completes or fails. 
--- .../operator/exchange/ExchangeService.java | 19 +------- .../exchange/ExchangeSinkHandler.java | 4 +- .../xpack/esql/plugin/ComputeService.java | 48 +++++++++---------- 3 files changed, 26 insertions(+), 45 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index a8afce1a3b223..a25f8e86abe79 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -97,6 +96,7 @@ public ExchangeSinkHandler createSinkHandler(String exchangeId, int maxBufferSiz if (sinks.putIfAbsent(exchangeId, sinkHandler) != null) { throw new IllegalStateException("sink exchanger for id [" + exchangeId + "] already exists"); } + sinkHandler.addCompletionListener(ActionListener.running(() -> sinks.remove(exchangeId))); return sinkHandler; } @@ -111,20 +111,6 @@ public ExchangeSinkHandler getSinkHandler(String exchangeId) { return sinkHandler; } - /** - * Removes the exchange sink handler associated with the given exchange id. - * W will abort the sink handler if the given failure is not null. 
- */ - public void finishSinkHandler(String exchangeId, @Nullable Exception failure) { - final ExchangeSinkHandler sinkHandler = sinks.remove(exchangeId); - if (sinkHandler != null) { - if (failure != null) { - sinkHandler.onFailure(failure); - } - assert sinkHandler.isFinished() : "Exchange sink " + exchangeId + " wasn't finished yet"; - } - } - /** * Opens a remote sink handler on the remote node for the given session ID. */ @@ -215,8 +201,7 @@ protected void runInternal() { } long elapsed = nowInMillis - sink.lastUpdatedTimeInMillis(); if (elapsed > maxInterval.millis()) { - finishSinkHandler( - e.getKey(), + sink.onFailure( new ElasticsearchTimeoutException( "Exchange sink {} has been inactive for {}", e.getKey(), diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index ab155d6ee8479..eae76495a4c69 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -119,9 +119,9 @@ public boolean isFinished() { } /** - * Fails this sink exchange handler + * Aborts and fails this exchange sink handler */ - void onFailure(Exception failure) { + public void onFailure(Exception failure) { completionFuture.onFailure(failure); buffer.finish(true); notifyListeners(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 90cbc018b77dc..f479928d3b886 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -605,19 +605,21 @@ private 
class DataNodeRequestExecutor { List driverProfiles, ActionListener listener ) { + parentTask.addListener(() -> exchangeSink.onFailure(new TaskCancelledException(parentTask.getReasonCancelled()))); this.request = request; this.parentTask = parentTask; this.exchangeSink = exchangeSink; - this.listener = listener; this.driverProfiles = driverProfiles; this.maxConcurrentShards = maxConcurrentShards; this.blockingSink = exchangeSink.createExchangeSink(); + this.listener = listener.delegateResponse((l, e) -> { + blockingSink.finish(); + exchangeSink.onFailure(e); + l.onFailure(e); + }); } void start() { - parentTask.addListener( - () -> exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(parentTask.getReasonCancelled())) - ); runBatch(0); } @@ -634,9 +636,9 @@ private void runBatch(int startBatchIndex) { parentTask, computeContext, request.plan(), - ActionListener.wrap(profiles -> onBatchCompleted(endBatchIndex, profiles), this::onFailure) + listener.delegateFailureAndWrap((l, profiles) -> onBatchCompleted(endBatchIndex, profiles)) ); - }, this::onFailure)); + }, listener::onFailure)); } private void onBatchCompleted(int lastBatchIndex, List batchProfiles) { @@ -649,18 +651,10 @@ private void onBatchCompleted(int lastBatchIndex, List batchProfi blockingSink.finish(); // don't return until all pages are fetched exchangeSink.addCompletionListener( - ContextPreservingActionListener.wrapPreservingContext( - ActionListener.runBefore(listener, () -> exchangeService.finishSinkHandler(request.sessionId(), null)), - transportService.getThreadPool().getThreadContext() - ) + ContextPreservingActionListener.wrapPreservingContext(listener, transportService.getThreadPool().getThreadContext()) ); } } - - private void onFailure(Exception e) { - exchangeService.finishSinkHandler(request.sessionId(), e); - listener.onFailure(e); - } } private void runComputeOnDataNode( @@ -675,10 +669,19 @@ private void runComputeOnDataNode( : List.of(); final var 
responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); + var externalSink = exchangeService.getSinkHandler(externalId); + listener = listener.delegateResponse((l, e) -> { + externalSink.onFailure(e); + l.onFailure(e); + }); try (RefCountingListener refs = new RefCountingListener(listener.map(i -> new ComputeResponse(collectedProfiles)))) { final AtomicBoolean cancelled = new AtomicBoolean(); // run compute with target shards var internalSink = exchangeService.createSinkHandler(request.sessionId(), request.pragmas().exchangeBufferSize()); + listener = listener.delegateResponse((l, e) -> { + internalSink.onFailure(e); + l.onFailure(e); + }); DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( request, task, @@ -689,8 +692,7 @@ private void runComputeOnDataNode( ); dataNodeRequestExecutor.start(); // run the node-level reduction - var externalSink = exchangeService.getSinkHandler(externalId); - task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); + task.addListener(() -> externalSink.onFailure(new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); @@ -711,17 +713,13 @@ private void runComputeOnDataNode( collectedProfiles.addAll(driverProfiles); } // don't return until all pages are fetched - externalSink.addCompletionListener( - ActionListener.runBefore(reductionListener, () -> exchangeService.finishSinkHandler(externalId, null)) - ); + externalSink.addCompletionListener(reductionListener); }, e -> { - exchangeService.finishSinkHandler(externalId, e); + externalSink.onFailure(e); reductionListener.onFailure(e); }) ); } catch 
(Exception e) { - exchangeService.finishSinkHandler(externalId, e); - exchangeService.finishSinkHandler(request.sessionId(), e); listener.onFailure(e); } } @@ -798,9 +796,7 @@ void runComputeOnRemoteCluster( ActionListener listener ) { final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); - parentTask.addListener( - () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) - ); + parentTask.addListener(() -> exchangeSink.onFailure(new TaskCancelledException(parentTask.getReasonCancelled()))); ThreadPool threadPool = transportService.getThreadPool(); final var responseHeadersCollector = new ResponseHeadersCollector(threadPool.getThreadContext()); listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); From 8357d2b32b812fa79842edc9f956af2f2e47d82e Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 19 Mar 2024 07:20:42 -0700 Subject: [PATCH 020/214] Fix assumption in TimeSeriesIT (#106448) This should be an assumption instead of an assertion. 
Closes #106444 --- .../java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index c3020c510fc24..406361438fc42 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -16,13 +16,12 @@ public class TimeSeriesIT extends AbstractEsqlIntegTestCase { @Override protected EsqlQueryResponse run(EsqlQueryRequest request) { - assertTrue("timseries requires pragmas", canUseQueryPragmas()); + assumeTrue("timseries requires pragmas", canUseQueryPragmas()); var settings = Settings.builder().put(request.pragmas().getSettings()).put(QueryPragmas.TIME_SERIES_MODE.getKey(), "true").build(); request.pragmas(new QueryPragmas(settings)); return super.run(request); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106444") public void testEmpty() { Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("pod")).build(); client().admin() From 2489d2360c6fe1bfcee222690166f028588743ae Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 19 Mar 2024 15:53:29 +0100 Subject: [PATCH 021/214] Skip older versions for new test (#106480) This error message only works in 8.14 --- .../resources/rest-api-spec/test/esql/61_enrich_ip.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 89e3c31bd475a..76dff626b9481 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -106,6 +106,10 @@ teardown: --- "Invalid IP strings": + - skip: + version: " - 8.13.99" + reason: "IP range ENRICH support was added in 8.14.0" + - do: catch: /'invalid_[\d\.]+' is not an IP string literal/ esql.query: From edc45f56a92137c6d30537e91c04529260c8c921 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 19 Mar 2024 08:01:51 -0700 Subject: [PATCH 022/214] Revert "Auto remove sink handlers when completed (#106438)" This reverts commit 823f174c70ad4e93c8db38fe0d1cff4bb0e865da. --- .../operator/exchange/ExchangeService.java | 19 +++++++- .../exchange/ExchangeSinkHandler.java | 4 +- .../xpack/esql/plugin/ComputeService.java | 48 ++++++++++--------- 3 files changed, 45 insertions(+), 26 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index a25f8e86abe79..a8afce1a3b223 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -96,7 +97,6 @@ public ExchangeSinkHandler createSinkHandler(String exchangeId, int maxBufferSiz if (sinks.putIfAbsent(exchangeId, sinkHandler) != null) { throw new IllegalStateException("sink exchanger for id [" + exchangeId + "] already exists"); } - sinkHandler.addCompletionListener(ActionListener.running(() -> 
sinks.remove(exchangeId))); return sinkHandler; } @@ -111,6 +111,20 @@ public ExchangeSinkHandler getSinkHandler(String exchangeId) { return sinkHandler; } + /** + * Removes the exchange sink handler associated with the given exchange id. + * We will abort the sink handler if the given failure is not null. + */ + public void finishSinkHandler(String exchangeId, @Nullable Exception failure) { + final ExchangeSinkHandler sinkHandler = sinks.remove(exchangeId); + if (sinkHandler != null) { + if (failure != null) { + sinkHandler.onFailure(failure); + } + assert sinkHandler.isFinished() : "Exchange sink " + exchangeId + " wasn't finished yet"; + } + } + /** * Opens a remote sink handler on the remote node for the given session ID. */ @@ -201,7 +215,8 @@ protected void runInternal() { } long elapsed = nowInMillis - sink.lastUpdatedTimeInMillis(); if (elapsed > maxInterval.millis()) { - sink.onFailure( + finishSinkHandler( + e.getKey(), new ElasticsearchTimeoutException( "Exchange sink {} has been inactive for {}", e.getKey(), diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java index eae76495a4c69..ab155d6ee8479 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkHandler.java @@ -119,9 +119,9 @@ public boolean isFinished() { } /** - * Aborts and fails this exchange sink handler + * Fails this sink exchange handler */ - public void onFailure(Exception failure) { + void onFailure(Exception failure) { completionFuture.onFailure(failure); buffer.finish(true); notifyListeners(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index f479928d3b886..90cbc018b77dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -605,21 +605,19 @@ private class DataNodeRequestExecutor { List driverProfiles, ActionListener listener ) { - parentTask.addListener(() -> exchangeSink.onFailure(new TaskCancelledException(parentTask.getReasonCancelled()))); this.request = request; this.parentTask = parentTask; this.exchangeSink = exchangeSink; + this.listener = listener; this.driverProfiles = driverProfiles; this.maxConcurrentShards = maxConcurrentShards; this.blockingSink = exchangeSink.createExchangeSink(); - this.listener = listener.delegateResponse((l, e) -> { - blockingSink.finish(); - exchangeSink.onFailure(e); - l.onFailure(e); - }); } void start() { + parentTask.addListener( + () -> exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(parentTask.getReasonCancelled())) + ); runBatch(0); } @@ -636,9 +634,9 @@ private void runBatch(int startBatchIndex) { parentTask, computeContext, request.plan(), - listener.delegateFailureAndWrap((l, profiles) -> onBatchCompleted(endBatchIndex, profiles)) + ActionListener.wrap(profiles -> onBatchCompleted(endBatchIndex, profiles), this::onFailure) ); - }, listener::onFailure)); + }, this::onFailure)); } private void onBatchCompleted(int lastBatchIndex, List batchProfiles) { @@ -651,10 +649,18 @@ private void onBatchCompleted(int lastBatchIndex, List batchProfi blockingSink.finish(); // don't return until all pages are fetched exchangeSink.addCompletionListener( - ContextPreservingActionListener.wrapPreservingContext(listener, transportService.getThreadPool().getThreadContext()) + ContextPreservingActionListener.wrapPreservingContext( + ActionListener.runBefore(listener, () -> 
exchangeService.finishSinkHandler(request.sessionId(), null)), + transportService.getThreadPool().getThreadContext() + ) ); } } + + private void onFailure(Exception e) { + exchangeService.finishSinkHandler(request.sessionId(), e); + listener.onFailure(e); + } } private void runComputeOnDataNode( @@ -669,19 +675,10 @@ private void runComputeOnDataNode( : List.of(); final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); - var externalSink = exchangeService.getSinkHandler(externalId); - listener = listener.delegateResponse((l, e) -> { - externalSink.onFailure(e); - l.onFailure(e); - }); try (RefCountingListener refs = new RefCountingListener(listener.map(i -> new ComputeResponse(collectedProfiles)))) { final AtomicBoolean cancelled = new AtomicBoolean(); // run compute with target shards var internalSink = exchangeService.createSinkHandler(request.sessionId(), request.pragmas().exchangeBufferSize()); - listener = listener.delegateResponse((l, e) -> { - internalSink.onFailure(e); - l.onFailure(e); - }); DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( request, task, @@ -692,7 +689,8 @@ private void runComputeOnDataNode( ); dataNodeRequestExecutor.start(); // run the node-level reduction - task.addListener(() -> externalSink.onFailure(new TaskCancelledException(task.getReasonCancelled()))); + var externalSink = exchangeService.getSinkHandler(externalId); + task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); @@ -713,13 +711,17 @@ private void runComputeOnDataNode( collectedProfiles.addAll(driverProfiles); } // don't 
return until all pages are fetched - externalSink.addCompletionListener(reductionListener); + externalSink.addCompletionListener( + ActionListener.runBefore(reductionListener, () -> exchangeService.finishSinkHandler(externalId, null)) + ); }, e -> { - externalSink.onFailure(e); + exchangeService.finishSinkHandler(externalId, e); reductionListener.onFailure(e); }) ); } catch (Exception e) { + exchangeService.finishSinkHandler(externalId, e); + exchangeService.finishSinkHandler(request.sessionId(), e); listener.onFailure(e); } } @@ -796,7 +798,9 @@ void runComputeOnRemoteCluster( ActionListener listener ) { final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); - parentTask.addListener(() -> exchangeSink.onFailure(new TaskCancelledException(parentTask.getReasonCancelled()))); + parentTask.addListener( + () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) + ); ThreadPool threadPool = transportService.getThreadPool(); final var responseHeadersCollector = new ResponseHeadersCollector(threadPool.getThreadContext()); listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); From 524747e1d69cca231c11c33c86e2f8b44297532f Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 19 Mar 2024 09:23:53 -0700 Subject: [PATCH 023/214] Dedupe terms in terms queries (#106381) This avoids duplication of terms in a terms query deduplicating these in the base MappedFieldType. 
--- docs/changelog/106381.yaml | 5 + .../index/mapper/BooleanFieldMapper.java | 4 +- .../index/mapper/MappedFieldType.java | 4 +- .../index/query/TermsQueryBuilderTests.java | 1 + .../wildcard/mapper/WildcardFieldMapper.java | 11 --- .../wildcard/mapper/TermsQueryTests.java | 93 +++++++++++++++++++ 6 files changed, 105 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/106381.yaml create mode 100644 x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java diff --git a/docs/changelog/106381.yaml b/docs/changelog/106381.yaml new file mode 100644 index 0000000000000..500f6d5416822 --- /dev/null +++ b/docs/changelog/106381.yaml @@ -0,0 +1,5 @@ +pr: 106381 +summary: Dedupe terms in terms queries +area: Mapping +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index cc01a487ad7b8..968c48abc54d8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -48,6 +48,7 @@ import java.time.ZoneId; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -317,8 +318,9 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { if (isIndexed()) { return super.termsQuery(values, context); } else { + Set dedupe = new HashSet<>(values); BooleanQuery.Builder builder = new BooleanQuery.Builder(); - for (Object value : values) { + for (Object value : dedupe) { builder.add(termQuery(value, context), BooleanClause.Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 
265374a687312..1707871066645 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -49,6 +49,7 @@ import java.time.ZoneId; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -234,8 +235,9 @@ public Query termQueryCaseInsensitive(Object value, @Nullable SearchExecutionCon * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link Occur#SHOULD} clauses * are generated with {@link #termQuery}. */ public Query termsQuery(Collection values, @Nullable SearchExecutionContext context) { + Set dedupe = new HashSet<>(values); BooleanQuery.Builder builder = new BooleanQuery.Builder(); - for (Object value : values) { + for (Object value : dedupe) { builder.add(termQuery(value, context), Occur.SHOULD); } return new ConstantScoreQuery(builder.build()); diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 98096a49443a9..d7a1f70333ad8 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -106,6 +106,7 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, .or(instanceOf(ConstantScoreQuery.class)) .or(instanceOf(MatchNoDocsQuery.class)) ); + // if (true) throw new IllegalArgumentException(randomTerms.toString()); if (query instanceof ConstantScoreQuery) { assertThat(((ConstantScoreQuery) query).getQuery(), instanceOf(BooleanQuery.class)); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 
62306a18d946b..a07544ff68c9a 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; @@ -82,7 +81,6 @@ import java.nio.charset.StandardCharsets; import java.time.ZoneId; import java.util.ArrayList; -import java.util.Collection; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -843,15 +841,6 @@ public Query prefixQuery( return wildcardQuery(escapeWildcardSyntax(value) + "*", method, caseInsensitive, context); } - @Override - public Query termsQuery(Collection values, SearchExecutionContext context) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - for (Object value : values) { - bq.add(termQuery(value, context), Occur.SHOULD); - } - return new ConstantScoreQuery(bq.build()); - } - @Override public BlockLoader blockLoader(BlockLoaderContext blContext) { if (hasDocValues()) { diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java new file mode 100644 index 0000000000000..3b7e1777b3bc9 --- /dev/null +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.wildcard.mapper; + +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractBuilderTestCase; +import org.elasticsearch.xpack.wildcard.Wildcard; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class TermsQueryTests extends AbstractBuilderTestCase { + + protected Collection> getPlugins() { + return List.of(Wildcard.class); + } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + mapperService.merge("_doc", new CompressedXContent(org.elasticsearch.common.Strings.format(""" + { + "properties": { + "mapped_wildcard": { + "type": "wildcard" + } + } + }""")), MapperService.MergeReason.MAPPING_UPDATE); + } + + public void testSingleDuplicateTerms() throws IOException { + String[] duplicates = new String[1023]; + Arrays.fill(duplicates, "duplicate"); + QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates); + termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext()); + Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext()); + + QueryBuilder queryBuilder = new BoolQueryBuilder().should(new WildcardQueryBuilder("mapped_wildcard", "duplicate")); + queryBuilder = 
queryBuilder.rewrite(createQueryRewriteContext()); + Query expected = new ConstantScoreQuery(queryBuilder.toQuery(createSearchExecutionContext())); + + assertEquals(expected, actual); + } + + public void testMultiDuplicateTerms() throws IOException { + int numTerms = randomIntBetween(2, 10); + List randomTerms = new ArrayList<>(numTerms); + for (int i = 0; i < numTerms; ++i) { + randomTerms.add(randomAlphaOfLengthBetween(1, 1024)); + } + int totalTerms = randomIntBetween(numTerms * 5, 1023); + String[] duplicates = new String[totalTerms]; + for (int i = 0; i < numTerms; ++i) { + duplicates[i] = randomTerms.get(i); + } + for (int i = numTerms; i < totalTerms; ++i) { + duplicates[i] = randomTerms.get(randomIntBetween(0, numTerms - 1)); + } + + QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates); + termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext()); + Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext()); + + Set ordered = new HashSet<>(randomTerms); + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + for (String randomTerm : ordered) { + QueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder("mapped_wildcard", randomTerm); + wildcardQueryBuilder = wildcardQueryBuilder.rewrite(createQueryRewriteContext()); + boolQueryBuilder.should(wildcardQueryBuilder); + } + QueryBuilder expectedQueryBuilder = boolQueryBuilder.rewrite(createQueryRewriteContext()); + Query expected = new ConstantScoreQuery(expectedQueryBuilder.toQuery(createSearchExecutionContext())); + + assertEquals(expected, actual); + } +} From e14dd54ae974797e748617e9b5de3a3fc59426e0 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 19 Mar 2024 17:58:37 +0100 Subject: [PATCH 024/214] Support ST_INTERSECTS between two geometry columns (#104907) * Support ST_INTERSECTS between geometry column and other geometry or string * Pushdown to lucene for ST_INTERSECTS on GEO_POINT * Get geo_shape working in ST_INTERSECTS 
bypassing SingleValueQuery * Initial work to support cartesian shape queries in ESQL * Fixed CSV tests for combined ST_INTERSECTS and ST_CENTROID * Fixed bug in point-in-shape query for CARTESIAN_POINT * Added unit tests for SpatialIntersects and fixed a few bugs found * Added comments to public ShapeQueryBuilder class * Move calls to random() later to avoid security exception * Refined type checking support in ST_INTERSECTS Improved the combinations supported as preparation for removing the ugly try/catch way of detecting the difference between WKT and WKB in some code. * Fixed bugs in incorrect use of doc-values in parameter type matching Also made a few refinements, including removing one try/catch approach to differentiating between WKT and WKB. * Removed second place where we used try/catch to differentiate WKT from WKB This was a workaround for a mistake in the planning, where we incorrectly mapped incoming types to the wrong FieldEvaluators. We fixed that mistake in an earlier commit. * Fixed flaky tests where GEO was treated as CARTESIAN We assumed if the incoming types were constants, they had no CRS, even when they did, which was wrong. For shapes crossing the dateline this led to different (incorrect) behaviour. * Fixed a flaky test by removing some point==point optimizations * Moved spatial intersects to 'spatial' package When we developed the ST_CENTROID work, this was requested, so let's do it here too. * Use normal switch on enums * Cleanup some static utility methods Now all code paths that can convert a constant string to a geometry use the same code. * Fixed bugs with non-quantized coordinates, and cleaned up code a little * Fixed failing test after change to evaluator class names * Refactored SpatialRelatesFunction into three files, and made evaluatorRules static This was a general cleanup, making the code more organized, but did also achieve static evaluator rules so we don't re-create these on every query parsing.
* Fixed compile error after rebase on main * Removed ConstantAndConstant support, using fold() correctly instead * better error on circles * Make sure compound predicates are supported in use-doc-values pushdown * Testing ENRICH with ST_INTERSECTS This required adding new data for an ENRICH index, and this data could be tested with a few other related tests, which were also added. * Added missing mixed-cluster rules for testing only with 8.14 * Fixed some mixed-cluster issues where we failed to mark test for only 8.14 Also added an interesting polygon-polygon intersection case from real data. * Fix flaky test where cartesian polygons were generated from geo * Remove support for string literals in ST_INTERSECTS * Fix failing tests after removing string support * Removed unused code from previous string literal support (WKT parsing) * Support case where both fields are points and doc-values If we have an ST_INTERSECTS and an ST_CENTROID, the centroid asks to load the points as doc-values, and the ST_INTERSECTS needs to therefore support two doc-values points. * Disallow more than one field from doc-values for ST_INTERSECTS * Remove unused evaluator classes * Add tests for multiple doc-values if not in same intersects * Fix errors after rebase on main * Fixed bug in missing support for spatial function expressions in EVAL When a spatial aggregate expects doc-values, this was not being communicated to spatial functions in EVAL, only in WHERE. * Reduce flaky tests when reading directly from enrich source indices The test framework does not expect enrich source indices to be used directly in queries, leading to duplicated results on multi-node clusters, so we edit the queries to be less sensitive to this case.
* Fixed failing test * Code style * Fixed test file name and added function name annotation * Added documentation for st_intersects * Fixed failing show functions test * Code review changes, notably simplifying the type resolution * Fixed broken docs link --- docs/changelog/104907.yaml | 6 + .../functions/signature/st_intersects.svg | 1 + .../esql/functions/spatial-functions.asciidoc | 2 + .../esql/functions/st_intersects.asciidoc | 40 + .../functions/types/st_intersects.asciidoc | 12 + .../xpack/esql/CsvTestsDataLoader.java | 32 +- .../src/main/resources/airports_mp.csv | 8 + .../resources/enrich-IT_tests_only.csv-spec | 42 +- .../src/main/resources/meta.csv-spec | 4 +- .../src/main/resources/spatial.csv-spec | 198 +++++ .../main/resources/spatial_shapes.csv-spec | 164 +++- ...ianPointDocValuesAndConstantEvaluator.java | 128 +++ ...esianPointDocValuesAndSourceEvaluator.java | 142 ++++ ...tsCartesianSourceAndConstantEvaluator.java | 132 +++ ...ectsCartesianSourceAndSourceEvaluator.java | 152 ++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 +++ ...tsGeoPointDocValuesAndSourceEvaluator.java | 151 ++++ ...tersectsGeoSourceAndConstantEvaluator.java | 132 +++ ...IntersectsGeoSourceAndSourceEvaluator.java | 152 ++++ .../esql/expression/EsqlTypeResolutions.java | 17 +- .../function/EsqlFunctionRegistry.java | 9 +- .../spatial/SpatialEvaluatorFactory.java | 212 +++++ .../scalar/spatial/SpatialIntersects.java | 226 +++++ .../spatial/SpatialRelatesFunction.java | 297 +++++++ .../scalar/spatial/SpatialRelatesUtils.java | 105 +++ .../xpack/esql/io/stream/PlanNamedTypes.java | 13 + .../optimizer/LocalPhysicalPlanOptimizer.java | 58 +- .../planner/EsqlExpressionTranslators.java | 66 ++ .../querydsl/query/SpatialRelatesQuery.java | 287 +++++++ .../xpack/esql/type/EsqlDataTypes.java | 12 +- .../expression/function/TestCaseSupplier.java | 48 +- .../spatial/SpatialIntersectsTests.java | 213 +++++ .../optimizer/PhysicalPlanOptimizerTests.java | 789 ++++++++++++++++-- 
.../xpack/ql/util/SpatialCoordinateTypes.java | 12 + 34 files changed, 3880 insertions(+), 110 deletions(-) create mode 100644 docs/changelog/104907.yaml create mode 100644 docs/reference/esql/functions/signature/st_intersects.svg create mode 100644 docs/reference/esql/functions/st_intersects.asciidoc create mode 100644 docs/reference/esql/functions/types/st_intersects.asciidoc create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java diff --git a/docs/changelog/104907.yaml b/docs/changelog/104907.yaml new file mode 100644 index 0000000000000..0d8592ae29526 --- /dev/null +++ b/docs/changelog/104907.yaml @@ -0,0 +1,6 @@ +pr: 104907 +summary: Support ST_INTERSECTS between geometry column and other geometry or string +area: "ES|QL" +type: enhancement +issues: +- 104874 diff --git a/docs/reference/esql/functions/signature/st_intersects.svg b/docs/reference/esql/functions/signature/st_intersects.svg new file mode 100644 index 0000000000000..491ba80aee5e5 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_intersects.svg @@ -0,0 +1 @@ +ST_INTERSECTS(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index d99fe36191a31..c1758f61de723 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -8,9 +8,11 @@ {esql} supports these spatial functions: // tag::spatial_list[] +* <> * <> * <> // end::spatial_list[] +include::st_intersects.asciidoc[] include::st_x.asciidoc[] include::st_y.asciidoc[] diff 
--git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc new file mode 100644 index 0000000000000..1bf4cef0e2977 --- /dev/null +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -0,0 +1,40 @@ +[discrete] +[[esql-st_intersects]] +=== `ST_INTERSECTS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. + +*Description* + +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). 
+In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ + +*Supported types* + +include::types/st_intersects.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-results] +|=== diff --git a/docs/reference/esql/functions/types/st_intersects.asciidoc b/docs/reference/esql/functions/types/st_intersects.asciidoc new file mode 100644 index 0000000000000..b061ebd41359c --- /dev/null +++ b/docs/reference/esql/functions/types/st_intersects.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index c5730f3271945..3cddf3c10a7fe 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -61,6 +61,7 @@ public class CsvTestsDataLoader { private static final TestsDataset HEIGHTS = new TestsDataset("heights", "mapping-heights.json", "heights.csv"); private static final TestsDataset DECADES = new TestsDataset("decades", "mapping-decades.json", "decades.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); + private static final TestsDataset AIRPORTS_MP = new 
TestsDataset("airports_mp", "mapping-airports.json", "airports_mp.csv"); private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); private static final TestsDataset COUNTRIES_BBOX = new TestsDataset( "countries_bbox", @@ -91,6 +92,7 @@ public class CsvTestsDataLoader { Map.entry(HEIGHTS.indexName, HEIGHTS), Map.entry(DECADES.indexName, DECADES), Map.entry(AIRPORTS.indexName, AIRPORTS), + Map.entry(AIRPORTS_MP.indexName, AIRPORTS_MP), Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB), @@ -281,6 +283,7 @@ private static void loadCsvData( CheckedBiFunction p, Logger logger ) throws IOException { + ArrayList failures = new ArrayList<>(); StringBuilder builder = new StringBuilder(); try (BufferedReader reader = org.elasticsearch.xpack.ql.TestUtils.reader(resource)) { String line; @@ -390,13 +393,19 @@ private static void loadCsvData( } lineNumber++; if (builder.length() > BULK_DATA_SIZE) { - sendBulkRequest(indexName, builder, client, logger); + sendBulkRequest(indexName, builder, client, logger, failures); builder.setLength(0); } } } if (builder.isEmpty() == false) { - sendBulkRequest(indexName, builder, client, logger); + sendBulkRequest(indexName, builder, client, logger, failures); + } + if (failures.isEmpty() == false) { + for (String failure : failures) { + logger.error(failure); + } + throw new IOException("Data loading failed with " + failures.size() + " errors: " + failures.get(0)); } } @@ -405,7 +414,8 @@ private static String quoteIfNecessary(String value) { return isQuoted ? 
value : "\"" + value + "\""; } - private static void sendBulkRequest(String indexName, StringBuilder builder, RestClient client, Logger logger) throws IOException { + private static void sendBulkRequest(String indexName, StringBuilder builder, RestClient client, Logger logger, List failures) + throws IOException { // The indexName is optional for a bulk request, but we use it for routing in MultiClusterSpecIT. builder.append("\n"); logger.debug("Sending bulk request of [{}] bytes for [{}]", builder.length(), indexName); @@ -422,14 +432,26 @@ private static void sendBulkRequest(String indexName, StringBuilder builder, Res if (Boolean.FALSE.equals(errors)) { logger.info("Data loading of [{}] bytes into [{}] OK", builder.length(), indexName); } else { - throw new IOException("Data loading of [" + indexName + "] failed with errors: " + errors); + addError(failures, indexName, builder, "errors: " + result); } } } else { - throw new IOException("Data loading of [" + indexName + "] failed with status: " + response.getStatusLine()); + addError(failures, indexName, builder, "status: " + response.getStatusLine()); } } + private static void addError(List failures, String indexName, StringBuilder builder, String message) { + failures.add( + format( + "Data loading of [{}] bytes into [{}] failed with {}: Data [{}...]", + builder.length(), + indexName, + message, + builder.substring(0, 100) + ) + ); + } + private static void forceMerge(RestClient client, Set indices, Logger logger) throws IOException { String pattern = String.join(",", indices); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv new file mode 100644 index 0000000000000..079ef2e419fff --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_mp.csv @@ -0,0 +1,8 @@ +abbrev:keyword,name:text, scalerank:integer,type:keyword, location:geo_point, country:keyword, city:keyword, 
city_location:geo_point +XXX, Atlantis Int'l, 1, mid, POINT(0 0), Atlantis, Atlantis, POINT(0 0) +LUH, Sahnewal, 9, small, POINT(75.9570722403652 30.8503598561702), India, Ludhiāna, POINT(75.85 30.91) +SSE, Solapur, 9, mid, POINT(75.9330597710755 17.625415183635), India, Solāpur, POINT(75.92 17.68) +IXR, Birsa Munda, 9, mid, POINT(85.3235970368767 23.3177245989962), India, Rānchi, POINT(85.33 23.36) +AWZ, Ahwaz, 9, mid, POINT(48.7471065435931 31.3431585560757), Iran, Ahvāz, POINT(48.6692 31.3203) +GWL, Gwalior, 9, [mid,military], POINT(78.2172186546348 26.285487697937), India, Gwalior, POINT(78.178 26.2215) +HOD, Hodeidah Int'l, 9, mid, POINT(42.97109630194 14.7552534413725), Yemen, Al Ḩudaydah, POINT(42.9511 14.8022) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index 1908a738c62ae..cec1157455b18 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -152,6 +152,7 @@ a:keyword | a_lang:keyword ["1", "2"] | ["English", "French"] ; + enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env @@ -170,6 +171,7 @@ client_ip:ip | count_env:i | max_env:keyword 172.21.3.15 | 2 | Production ; + enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env, client_cidr @@ -187,6 +189,7 @@ client_ip:ip | env:keyword | client_cidr:ip_range 172.21.2.162 | [Development, QA] | 172.21.2.0/24 ; + enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | WHERE birth_date > "1960-01-01" @@ -207,6 +210,7 @@ birth_year:long | age_group:keyword | count:long 1960 | Senior | 8 ; + enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 
8.14.0] FROM employees | WHERE birth_date IS NOT NULL @@ -221,6 +225,7 @@ count:long | age_group:keyword 12 | Middle-aged ; + enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | ENRICH heights_policy ON height WITH height_group = description @@ -237,6 +242,7 @@ Tall | 1.8 | 1.99 | 25 Very Tall | 2.0 | 2.1 | 20 ; + enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM employees | ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description @@ -255,6 +261,7 @@ null | 1980 | null | Radical Eighties | 4 1950 | 1980 | Nifty Fifties | Radical Eighties | 34 ; + spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM airports | WHERE abbrev == "CPH" @@ -267,6 +274,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 ; + spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] FROM airports | WHERE abbrev == "CPH" @@ -279,6 +287,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 ; + spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_feature: esql.mv_warn @@ -290,7 +299,38 @@ FROM airports warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer POINT(1.396561 24.127649) | 872 | 88 | 1044 ; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 974ea8d72b73a..f448cd184d9b2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -64,6 +64,7 @@ sinh |"double sinh(n:double|integer|long|unsigned_long)"|n split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." 
| false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true +st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." | [false, false] | false | false st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." 
| false | false | false starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false @@ -166,6 +167,7 @@ double pi() "keyword split(str:keyword|text, delim:keyword|text)" "double sqrt(n:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" "double st_y(point:geo_point|cartesian_point)" "boolean starts_with(str:keyword|text, prefix:keyword|text)" @@ -219,5 +221,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -95 | 95 | 95 +96 | 96 | 96 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 02da586c6f357..88155301a06bc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -320,6 +320,173 @@ centroid:geo_point | count:long POINT(83.16847535921261 28.79002037679311) | 40 ; +centroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.12.99, reason:st_centroid added in 8.13] +FROM airports +| WHERE country == "United Kingdom" +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +centroidFromAirportsAfterIntersectsPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 
54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE country == "United Kingdom" +| STATS centroid = ST_CENTROID(location), count=COUNT() +| EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_in_iceland = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| KEEP centroid, count, centroid_in_uk, centroid_in_iceland +; + +centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +; + +centroidFromAirportsAfterIntersectsEvalExpression#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, 
in_iceland +| SORT count ASC +; + +centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean +POINT (-21.946634463965893 64.13187285885215) | 1 | false | true +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +POINT (0.04453958108176276 23.74658354606057) | 873 | false | false +; + +centroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (42.97109629958868 14.7552534006536) | 1 +; + +centroidFromAirportsAfterIntersectsCompoundPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (42.97109629958868 14.7552534006536) | 1 +; + +pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +; + +pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +// tag::st_intersects-airports[] +FROM airports +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +// end::st_intersects-airports[] +; + +// tag::st_intersects-airports-results[] +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 
14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_intersects-airports-results[] +; + +literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +intersects:boolean +true +; + +cityInCityBoundary#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| EVAL in_city = ST_INTERSECTS(city_location, city_boundary) +| STATS count=COUNT(*) BY in_city +| SORT count ASC +| EVAL cardinality = CASE(count < 10, "very few", count < 100, "few", "many") +| KEEP cardinality, in_city +; + +cardinality:k | in_city:boolean +"few" | false +"many" | true +; + +cityNotInCityBoundaryBiggest#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| WHERE NOT ST_INTERSECTS(city_location, city_boundary) +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| SORT boundary_wkt_length DESC +| KEEP abbrev, airport, city, city_location, boundary_wkt_length, city_boundary +| LIMIT 1 +; + +abbrev:keyword | airport:text | city:keyword | city_location:geo_point | 
boundary_wkt_length:integer | city_boundary:geo_shape +SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.2533) | 598 | POLYGON((109.1802 18.4609, 109.2304 18.4483, 109.2311 18.4261, 109.2696 18.411, 109.2602 18.3581, 109.2273 18.348, 109.2286 18.2638, 109.2842 18.2665, 109.3518 18.2166, 109.4508 18.1936, 109.4895 18.2281, 109.5137 18.2283, 109.4914 18.2781, 109.5041 18.2948, 109.4809 18.3034, 109.5029 18.3422, 109.5249 18.3375, 109.4993 18.3632, 109.535 18.4007, 109.5104 18.4374, 109.5231 18.4474, 109.5321 18.53, 109.4992 18.5568, 109.4192 18.5646, 109.4029 18.6302, 109.3286 18.5772, 109.309 18.5191, 109.2913 18.5141, 109.2434 18.5607, 109.2022 18.5572, 109.1815 18.5163, 109.1908 18.4711, 109.1802 18.4609))) +; + +airportCityLocationPointIntersection#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_mp +| WHERE ST_INTERSECTS(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointIntersectionCentroid#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_mp +| WHERE ST_INTERSECTS(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + geoPointEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -534,6 +701,37 @@ centroid:cartesian_point | count:long POINT (726480.0130685265 3359566.331716279) | 849 ; +cartesianCentroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 
1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + cartesianPointEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 7209812e0569c..69e56c7efe55d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -54,7 +54,7 @@ abbrev:keyword | name:text | location:geo_shape | cou "VLC" | "Valencia" | POINT(-0.473474930771676 39.4914597884489) | "Spain" | "Paterna" | POINT(-0.4406 39.5028) ; -simpleLoadFromCityBoundaries#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoadFromCityBoundaries#[skip:-8.13.99, reason:chunked CSV import support added in 8.14] FROM 
airport_city_boundaries | WHERE abbrev == "CPH" | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) @@ -66,8 +66,121 @@ abbrev:keyword | region:text | city_location:geo_point | airport:tex CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen | 265 ; -geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL location = TO_GEOSHAPE(location) +| WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +| KEEP abbrev, name, location, country, city, city_location +; + +abbrev:keyword | name:text | location:geo_shape | country:keyword | city:keyword | city_location:geo_point +HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) +; + +polygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airport_city_boundaries +| WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) +| KEEP abbrev, airport, region, city, city_location +| LIMIT 1 +; + +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +; + +pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports +| EVAL location = TO_GEOSHAPE(location) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +| KEEP abbrev, name, location, country, city, city_location +; + +abbrev:keyword | name:text | location:geo_shape | country:keyword | city:keyword | city_location:geo_point +HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) +; + +literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 
-1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointAsShapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointAsShapeIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; +wkt:keyword | pt:geo_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +shapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM countries_bbox +| WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:geo_shape +ZAF | South Africa | BBOX(16.483327, 37.892218, -22.136391, -46.969727) +SWZ | Swaziland | BBOX(30.798336, 32.133400, -25.728336, -27.316391) +LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) +; + +literalPolygonIntersectsLiteralPolygon#[skip:-8.13.99, 
reason:st_intersects added in 8.14] +ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] +| EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") +| MV_EXPAND wkt +| EVAL shape = TO_GEOSHAPE(wkt) +| WHERE ST_INTERSECTS(shape, other) +| KEEP wkt, shape, other +; + +wkt:keyword | shape:geo_shape | other:geo_shape +"POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))" | POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60)) | POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64)) +; + +literalPolygonIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) +; + +intersects:boolean +true +; + +geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -162,6 +275,53 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart "VLC" | "Valencia" | 8 | "mid" | POINT(-52706.98819688343 4792315.469321795) ; +cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM airports_web +| EVAL location = TO_CARTESIANSHAPE(location) +| WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| KEEP abbrev, name, location, scalerank, type +; + +abbrev:keyword | name:text | location:cartesian_shape | scalerank:i | type:k +HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | 9 | mid +; + +literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANSHAPE(wkt) +| WHERE 
ST_INTERSECTS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_shape +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +cartesianShapeIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +FROM countries_bbox_web +| WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +SWZ | Swaziland | BBOX(3428455.080322901, 3577073.7249586442, -2965472.9128583763, -3163056.5390926218) +LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117.2692412077, -3587446.106149188) +; + +literalCartesianPolygonIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] +| MV_EXPAND wkt +| EVAL shape = TO_CARTESIANSHAPE(wkt) +| EVAL other = TO_CARTESIANSHAPE("POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400))") +| WHERE ST_INTERSECTS(shape, other) +; + +wkt:keyword | shape:cartesian_shape | other:cartesian_shape +"POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) +; + cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..e32357c42bf71 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..7bf47b766bd95 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if 
(rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialIntersects.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator get(DriverContext 
context) { + return new SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..979869dc86c56 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. + */ +public final class SpatialIntersectsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock 
leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..6c47745d6af37 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. + */ +public final class SpatialIntersectsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector 
= rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: 
for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..8d87884d04077 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..45e9daf5bc453 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialIntersectsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + 
result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + 
this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..f043ff4104bbb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. + */ +public final class SpatialIntersectsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock 
leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..9f5f1c7cc9674 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialIntersects}. + * This class is generated. Do not edit it. + */ +public final class SpatialIntersectsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialIntersectsGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = 
rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int 
p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialIntersects.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialIntersectsGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialIntersectsGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialIntersectsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index e774ba36b16e6..088e768684cf2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -17,6 +17,10 
@@ import java.util.Locale; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -45,7 +49,18 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa return Expression.TypeResolution.TYPE_RESOLVED; } + private static final String[] SPATIAL_TYPE_NAMES = new String[] { + GEO_POINT.typeName(), + CARTESIAN_POINT.typeName(), + GEO_SHAPE.typeName(), + CARTESIAN_SHAPE.typeName() }; + private static final String[] POINT_TYPE_NAMES = new String[] { GEO_POINT.typeName(), CARTESIAN_POINT.typeName() }; + public static Expression.TypeResolution isSpatialPoint(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { - return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, "geo_point or cartesian_point"); + return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, POINT_TYPE_NAMES); + } + + public static Expression.TypeResolution isSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + return isType(e, EsqlDataTypes::isSpatial, operationName, paramOrd, SPATIAL_TYPE_NAMES); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index b4d69cc96bffd..e19048a40dda9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -78,6 +78,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -178,9 +179,11 @@ private FunctionDefinition[][] functions() { def(DateTrunc.class, DateTrunc::new, "date_trunc"), def(Now.class, Now::new, "now") }, // spatial - new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid") }, - new FunctionDefinition[] { def(StX.class, StX::new, "st_x") }, - new FunctionDefinition[] { def(StY.class, StY::new, "st_y") }, + new FunctionDefinition[] { + def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), + def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), + def(StX.class, StX::new, "st_x"), + def(StY.class, StY::new, "st_y") }, // conditional new FunctionDefinition[] { def(Case.class, Case::new, "case") }, // null diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java new file mode 100644 index 0000000000000..ccdd68e1806c1 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.Map; +import java.util.function.Function; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; + +/** + * SpatialRelatesFunction classes, like SpatialIntersects, support various combinations of incoming types, which can be sourced from + * constant literals (foldable), or from the index, which could provide either source values or doc-values. This class is used to + * create the appropriate evaluator for the given combination of types. 
+ * @param + * @param + */ +abstract class SpatialEvaluatorFactory { + protected final TriFunction factoryCreator; + + SpatialEvaluatorFactory(TriFunction factoryCreator) { + this.factoryCreator = factoryCreator; + } + + public abstract EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier function, + Function toEvaluator + ); + + public static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator( + SpatialSourceSupplier s, + Map> evaluatorRules, + Function toEvaluator + ) { + var evaluatorKey = new SpatialEvaluatorKey( + s.crsType(), + s.leftDocValues(), + s.rightDocValues(), + fieldKey(s.left()), + fieldKey(s.right()) + ); + SpatialEvaluatorFactory factory = evaluatorRules.get(evaluatorKey); + if (factory == null) { + evaluatorKey = evaluatorKey.swapSides(); + factory = evaluatorRules.get(evaluatorKey); + if (factory == null) { + throw evaluatorKey.unsupported(); + } + return factory.get(new SwappedSpatialSourceSupplier(s), toEvaluator); + } + return factory.get(s, toEvaluator); + } + + protected static SpatialEvaluatorFieldKey fieldKey(Expression expression) { + return new SpatialEvaluatorFieldKey(expression.dataType(), expression.foldable()); + } + + /** + * This interface defines a supplier of the key information needed by the spatial evaluator factories. + * The SpatialRelatesFunction will use this to supply the necessary information to the factories. + * When we need to swap left and right sides around, we can use a SwappableSpatialSourceSupplier. 
+ */ + interface SpatialSourceSupplier { + Source source(); + + Expression left(); + + Expression right(); + + SpatialRelatesFunction.SpatialCrsType crsType(); + + boolean leftDocValues(); + + boolean rightDocValues(); + } + + protected static class SwappedSpatialSourceSupplier implements SpatialSourceSupplier { + private final SpatialSourceSupplier delegate; + + public SwappedSpatialSourceSupplier(SpatialSourceSupplier delegate) { + this.delegate = delegate; + } + + @Override + public Source source() { + return delegate.source(); + } + + @Override + public SpatialRelatesFunction.SpatialCrsType crsType() { + return delegate.crsType(); + } + + @Override + public boolean leftDocValues() { + return delegate.leftDocValues(); + } + + @Override + public boolean rightDocValues() { + return delegate.rightDocValues(); + } + + @Override + public Expression left() { + return delegate.right(); + } + + @Override + public Expression right() { + return delegate.left(); + } + } + + protected static class SpatialEvaluatorFactoryWithFields extends SpatialEvaluatorFactory< + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory> { + SpatialEvaluatorFactoryWithFields( + TriFunction< + Source, + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory, + EvalOperator.ExpressionEvaluator.Factory> factoryCreator + ) { + super(factoryCreator); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier s, + Function toEvaluator + ) { + return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), toEvaluator.apply(s.right())); + } + } + + protected static class SpatialEvaluatorWithConstantFactory extends SpatialEvaluatorFactory< + EvalOperator.ExpressionEvaluator.Factory, + Component2D> { + + SpatialEvaluatorWithConstantFactory( + TriFunction< + Source, + EvalOperator.ExpressionEvaluator.Factory, + Component2D, + EvalOperator.ExpressionEvaluator.Factory> factoryCreator + ) { + 
super(factoryCreator); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory get( + SpatialSourceSupplier s, + Function toEvaluator + ) { + return factoryCreator.apply(s.source(), toEvaluator.apply(s.left()), asLuceneComponent2D(s.crsType(), s.right())); + } + } + + protected record SpatialEvaluatorFieldKey(DataType dataType, boolean isConstant) {} + + protected record SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType crsType, + boolean leftDocValues, + boolean rightDocValues, + SpatialEvaluatorFieldKey left, + SpatialEvaluatorFieldKey right + ) { + SpatialEvaluatorKey(SpatialRelatesFunction.SpatialCrsType crsType, SpatialEvaluatorFieldKey left, SpatialEvaluatorFieldKey right) { + this(crsType, false, false, left, right); + } + + SpatialEvaluatorKey withLeftDocValues() { + return new SpatialEvaluatorKey(crsType, true, false, left, right); + } + + SpatialEvaluatorKey swapSides() { + return new SpatialEvaluatorKey(crsType, rightDocValues, leftDocValues, right, left); + } + + static SpatialEvaluatorKey fromSourceAndConstant(DataType left, DataType right) { + return new SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType.fromDataType(left), + new SpatialEvaluatorFieldKey(left, false), + new SpatialEvaluatorFieldKey(right, true) + ); + } + + static SpatialEvaluatorKey fromSources(DataType left, DataType right) { + return new SpatialEvaluatorKey( + SpatialRelatesFunction.SpatialCrsType.fromDataType(left), + new SpatialEvaluatorFieldKey(left, false), + new SpatialEvaluatorFieldKey(right, false) + ); + } + + UnsupportedOperationException unsupported() { + return new UnsupportedOperationException("Unsupported spatial relation combination: " + this); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java new file mode 100644 index 
0000000000000..831c041caaa94 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -0,0 +1,226 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +public class SpatialIntersects extends SpatialRelatesFunction { + protected static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.INTERSECTS, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Intersects") + ); + protected static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.INTERSECTS, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Intersects") + ); + + @FunctionInfo(returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.") + public SpatialIntersects( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + private SpatialIntersects(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.INTERSECTS; + } + + @Override + public SpatialIntersects withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || 
foundField(right(), attributes); + return new SpatialIntersects(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialIntersects replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialIntersects(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialIntersects::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + protected Map> evaluatorRules() { + return evaluatorMap; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialIntersectsGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new 
SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialIntersectsCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialIntersectsCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, 
@Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = 
"CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java new file mode 100644 index 0000000000000..cdd21682d0db7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.Component2DVisitor; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.apache.lucene.document.ShapeField.QueryRelation.DISJOINT; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; +import static org.elasticsearch.xpack.ql.type.DataTypes.isNull; + +public abstract class SpatialRelatesFunction extends BinaryScalarFunction + implements + EvaluatorMapper, + SpatialEvaluatorFactory.SpatialSourceSupplier { + protected SpatialCrsType crsType; + protected final boolean leftDocValues; + protected final boolean rightDocValues; + + protected SpatialRelatesFunction(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right); + this.leftDocValues = leftDocValues; + this.rightDocValues = rightDocValues; + } + + public abstract ShapeField.QueryRelation queryRelation(); + + @Override + public DataType dataType() { + return DataTypes.BOOLEAN; + } + + @Override + public SpatialCrsType crsType() { + if (crsType == null) { + resolveType(); + } + return crsType; + } + + @Override + protected TypeResolution resolveType() { + if (left().foldable() && right().foldable() == false || isNull(left().dataType())) { + // Left is literal, but right is not, check the left field's type against the right field + return resolveType(right(), left(), SECOND, FIRST); + } else { + // All other cases check the right against the left + return resolveType(left(), right(), FIRST, SECOND); + } + } + + private TypeResolution resolveType( + Expression leftExpression, + Expression rightExpression, + TypeResolutions.ParamOrdinal leftOrdinal, + TypeResolutions.ParamOrdinal rightOrdinal + ) { + TypeResolution leftResolution = isSpatial(leftExpression, sourceText(), leftOrdinal); + TypeResolution rightResolution = isSpatial(rightExpression, sourceText(), rightOrdinal); + if (leftResolution.resolved()) { + return resolveType(leftExpression, rightExpression, rightOrdinal); + } else if 
(rightResolution.resolved()) { + return resolveType(rightExpression, leftExpression, leftOrdinal); + } else { + return leftResolution; + } + } + + protected TypeResolution resolveType( + Expression spatialExpression, + Expression otherExpression, + TypeResolutions.ParamOrdinal otherParamOrdinal + ) { + if (isNull(spatialExpression.dataType())) { + return isSpatial(otherExpression, sourceText(), otherParamOrdinal); + } + TypeResolution resolution = isSameSpatialType(spatialExpression.dataType(), otherExpression, sourceText(), otherParamOrdinal); + if (resolution.unresolved()) { + return resolution; + } + crsType = SpatialCrsType.fromDataType(spatialExpression.dataType()); + return TypeResolution.TYPE_RESOLVED; + } + + public static TypeResolution isSameSpatialType( + DataType spatialDataType, + Expression expression, + String operationName, + TypeResolutions.ParamOrdinal paramOrd + ) { + return isType( + expression, + dt -> EsqlDataTypes.isSpatial(dt) && spatialCRSCompatible(spatialDataType, dt), + operationName, + paramOrd, + compatibleTypeNames(spatialDataType) + ); + } + + private static final String[] GEO_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; + private static final String[] CARTESIAN_TYPE_NAMES = new String[] { GEO_POINT.typeName(), GEO_SHAPE.typeName() }; + + private static boolean spatialCRSCompatible(DataType spatialDataType, DataType otherDataType) { + return EsqlDataTypes.isSpatialGeo(spatialDataType) && EsqlDataTypes.isSpatialGeo(otherDataType) + || EsqlDataTypes.isSpatialGeo(spatialDataType) == false && EsqlDataTypes.isSpatialGeo(otherDataType) == false; + } + + static String[] compatibleTypeNames(DataType spatialDataType) { + return EsqlDataTypes.isSpatialGeo(spatialDataType) ? GEO_TYPE_NAMES : CARTESIAN_TYPE_NAMES; + } + + @Override + public boolean foldable() { + return left().foldable() && right().foldable(); + } + + /** + * Mark the function as expecting the specified fields to arrive as doc-values. 
+ */ + public abstract SpatialRelatesFunction withDocValues(Set attributes); + + /** + * Push-down to Lucene is only possible if one field is an indexed spatial field, and the other is a constant spatial or string column. + */ + public boolean canPushToSource(Predicate isAggregatable) { + // The use of foldable here instead of SpatialEvaluatorFieldKey.isConstant is intentional to match the behavior of the + // Lucene pushdown code in EsqlTranslationHandler::SpatialRelatesTranslator + // We could enhance both places to support ReferenceAttributes that refer to constants, but that is a larger change + return isPushableFieldAttribute(left(), isAggregatable) && right().foldable() + || isPushableFieldAttribute(right(), isAggregatable) && left().foldable(); + } + + private static boolean isPushableFieldAttribute(Expression exp, Predicate isAggregatable) { + return exp instanceof FieldAttribute fa + && fa.getExactInfo().hasExact() + && isAggregatable.test(fa) + && EsqlDataTypes.isSpatial(fa.dataType()); + } + + @Override + public int hashCode() { + // NB: the hashcode is currently used for key generation so + // to avoid clashes between aggs with the same arguments, add the class name as variation + return Objects.hash(getClass(), children(), leftDocValues, rightDocValues); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + SpatialRelatesFunction other = (SpatialRelatesFunction) obj; + return Objects.equals(other.children(), children()) + && Objects.equals(other.leftDocValues, leftDocValues) + && Objects.equals(other.rightDocValues, rightDocValues); + } + return false; + } + + public boolean leftDocValues() { + return leftDocValues; + } + + public boolean rightDocValues() { + return rightDocValues; + } + + /** + * Produce a map of rules defining combinations of incoming types to the evaluator factory that should be used. 
+ */ + protected abstract Map> evaluatorRules(); + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return SpatialEvaluatorFactory.makeSpatialEvaluator(this, evaluatorRules(), toEvaluator); + } + + /** + * When performing local physical plan optimization, it is necessary to know if this function has a field attribute. + * This is because the planner might push down a spatial aggregation to lucene, which results in the field being provided + * as doc-values instead of source values, and this function needs to know if it should use doc-values or not. + */ + public boolean hasFieldAttribute(Set foundAttributes) { + return foundField(left(), foundAttributes) || foundField(right(), foundAttributes); + } + + protected boolean foundField(Expression expression, Set foundAttributes) { + return expression instanceof FieldAttribute field && foundAttributes.contains(field); + } + + protected enum SpatialCrsType { + GEO, + CARTESIAN, + UNSPECIFIED; + + public static SpatialCrsType fromDataType(DataType dataType) { + return EsqlDataTypes.isSpatialGeo(dataType) ? SpatialCrsType.GEO + : EsqlDataTypes.isSpatial(dataType) ? SpatialCrsType.CARTESIAN + : SpatialCrsType.UNSPECIFIED; + } + } + + protected static class SpatialRelations { + protected final ShapeField.QueryRelation queryRelation; + protected final SpatialCoordinateTypes spatialCoordinateType; + protected final CoordinateEncoder coordinateEncoder; + protected final ShapeIndexer shapeIndexer; + protected final SpatialCrsType crsType; + + protected SpatialRelations( + ShapeField.QueryRelation queryRelation, + SpatialCoordinateTypes spatialCoordinateType, + CoordinateEncoder encoder, + ShapeIndexer shapeIndexer + ) { + this.queryRelation = queryRelation; + this.spatialCoordinateType = spatialCoordinateType; + this.coordinateEncoder = encoder; + this.shapeIndexer = shapeIndexer; + this.crsType = spatialCoordinateType.equals(SpatialCoordinateTypes.GEO) ? 
SpatialCrsType.GEO : SpatialCrsType.CARTESIAN; + } + + protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws IOException { + Component2D rightComponent2D = asLuceneComponent2D(crsType, fromBytesRef(right)); + return geometryRelatesGeometry(left, rightComponent2D); + } + + private Geometry fromBytesRef(BytesRef bytesRef) { + return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); + } + + protected boolean geometryRelatesGeometry(BytesRef left, Component2D rightComponent2D) throws IOException { + Geometry leftGeom = fromBytesRef(left); + // We already have a Component2D for the right geometry, so we need to convert the left geometry to a doc-values byte array + return geometryRelatesGeometry(asGeometryDocValueReader(coordinateEncoder, shapeIndexer, leftGeom), rightComponent2D); + } + + protected boolean geometryRelatesGeometry(GeometryDocValueReader reader, Component2D rightComponent2D) throws IOException { + var visitor = Component2DVisitor.getVisitor(rightComponent2D, queryRelation, coordinateEncoder); + reader.visit(visitor); + return visitor.matches(); + } + + protected boolean pointRelatesGeometry(long encoded, Geometry geometry) { + Component2D component2D = asLuceneComponent2D(crsType, geometry); + return pointRelatesGeometry(encoded, component2D); + } + + protected boolean pointRelatesGeometry(long encoded, Component2D component2D) { + // This code path exists for doc-values points, and we could consider re-using the point class to reduce garbage creation + Point point = spatialCoordinateType.longAsPoint(encoded); + return geometryRelatesPoint(component2D, point); + } + + private boolean geometryRelatesPoint(Component2D component2D, Point point) { + boolean contains = component2D.contains(point.getX(), point.getY()); + return queryRelation == DISJOINT ? 
contains == false : contains; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java new file mode 100644 index 0000000000000..e088dbf7a70ec --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.geometry.Circle; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; + +import static org.elasticsearch.xpack.ql.planner.ExpressionTranslators.valueOf; + +public class SpatialRelatesUtils { + + /** + * This function is 
used to convert a spatial constant to a lucene Component2D. + * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. + */ + static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crsType, Expression expression) { + return asLuceneComponent2D(crsType, makeGeometryFromLiteral(expression)); + } + + static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crsType, Geometry geometry) { + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + var luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, t -> {}); + return LatLonGeometry.create(luceneGeometries); + } else { + var luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + return XYGeometry.create(luceneGeometries); + } + } + + /** + * This function is used to convert a spatial constant to a doc-values byte array. + * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. 
+ */ + static GeometryDocValueReader asGeometryDocValueReader(SpatialRelatesFunction.SpatialCrsType crsType, Expression expression) + throws IOException { + Geometry geometry = makeGeometryFromLiteral(expression); + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + return asGeometryDocValueReader( + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "SpatialRelatesFunction"), + geometry + ); + } else { + return asGeometryDocValueReader(CoordinateEncoder.CARTESIAN, new CartesianShapeIndexer("SpatialRelatesFunction"), geometry); + } + + } + + /** + * Converting shapes into doc-values byte arrays is needed under two situations: + * - If both left and right are constants, we convert the right to Component2D and the left to doc-values for comparison + * - If the right is a constant and no lucene push-down was possible, we get WKB in the left and convert it to doc-values for comparison + */ + static GeometryDocValueReader asGeometryDocValueReader(CoordinateEncoder encoder, ShapeIndexer shapeIndexer, Geometry geometry) + throws IOException { + GeometryDocValueReader reader = new GeometryDocValueReader(); + CentroidCalculator centroidCalculator = new CentroidCalculator(); + if (geometry instanceof Circle) { + // Both the centroid calculator and the shape indexer do not support circles + throw new IllegalArgumentException(ShapeType.CIRCLE + " geometry is not supported"); + } + centroidCalculator.add(geometry); + reader.reset(GeometryDocValueWriter.write(shapeIndexer.indexShape(geometry), encoder, centroidCalculator)); + return reader; + } + + /** + * This function is used in two places, when evaluating a spatial constant in the SpatialRelatesFunction, as well as when + * we do lucene-pushdown of spatial functions. 
+ */ + public static Geometry makeGeometryFromLiteral(Expression expr) { + Object value = valueOf(expr); + + if (value instanceof BytesRef bytesRef) { + return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); + } else { + throw new IllegalArgumentException( + "Unsupported combination of literal [" + value.getClass().getSimpleName() + "] of type [" + expr.dataType() + "]" + ); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 96a1ce9ed715e..9b3bc5a9cc045 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -102,6 +102,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -387,6 +388,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Pow.class, PlanNamedTypes::writePow, PlanNamedTypes::readPow), of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), + of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeIntersects, PlanNamedTypes::readIntersects), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, 
PlanNamedTypes::readSubstring), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), @@ -1470,6 +1472,17 @@ static void writeDateTrunc(PlanStreamOutput out, DateTrunc dateTrunc) throws IOE out.writeExpression(fields.get(1)); } + static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { + return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static void writeIntersects(PlanStreamOutput out, SpatialIntersects intersects) throws IOException { + List fields = intersects.children(); + assert fields.size() == 2; + out.writeExpression(fields.get(0)); + out.writeExpression(fields.get(1)); + } + static Now readNow(PlanStreamInput in) throws IOException { return new Now(in.readSource(), in.configuration()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 546f34d1b474c..50c893f18b15e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules.OptimizerRule; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -24,6 +25,7 @@ import 
org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsTimeseriesQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; @@ -269,6 +271,8 @@ public static boolean canPushToSource(Expression exp, Predicate } else if (exp instanceof CIDRMatch cidrMatch) { return isAttributePushable(cidrMatch.ipField(), cidrMatch, hasIdenticalDelegate) && Expressions.foldable(cidrMatch.matches()); + } else if (exp instanceof SpatialRelatesFunction bc) { + return bc.canPushToSource(LocalPhysicalPlanOptimizer::isAggregatable); } return false; } @@ -453,7 +457,7 @@ public static boolean isPushableFieldAttribute(Expression exp, Predicate { @Override protected PhysicalPlan rule(AggregateExec aggregate) { - var foundAttributes = new HashSet(); + var foundAttributes = new HashSet(); PhysicalPlan plan = aggregate.transformDown(UnaryExec.class, exec -> { if (exec instanceof AggregateExec agg) { @@ -461,7 +465,8 @@ protected PhysicalPlan rule(AggregateExec aggregate) { var changedAggregates = false; for (NamedExpression aggExpr : agg.aggregates()) { if (aggExpr instanceof Alias as && as.child() instanceof SpatialAggregateFunction af) { - if (af.field() instanceof FieldAttribute fieldAttribute) { + if (af.field() instanceof FieldAttribute fieldAttribute + && allowedForDocValues(fieldAttribute, agg, foundAttributes)) { // We need to both mark the field to load differently, and change the spatial function to know to use it foundAttributes.add(fieldAttribute); changedAggregates = true; @@ -484,6 +489,36 @@ protected PhysicalPlan rule(AggregateExec aggregate) { ); } } + if (exec instanceof EvalExec evalExec) { + List fields = evalExec.fields(); + List changed = 
fields.stream() + .map( + f -> (Alias) f.transformDown( + SpatialRelatesFunction.class, + spatialRelatesFunction -> (spatialRelatesFunction.hasFieldAttribute(foundAttributes)) + ? spatialRelatesFunction.withDocValues(foundAttributes) + : spatialRelatesFunction + ) + ) + .toList(); + if (changed.equals(fields) == false) { + exec = new EvalExec(exec.source(), exec.child(), changed); + } + } + if (exec instanceof FilterExec filterExec) { + // Note that ST_CENTROID does not support shapes, but SpatialRelatesFunction does, so when we extend the centroid + // to support shapes, we need to consider loading shape doc-values for both centroid and relates (ST_INTERSECTS) + var condition = filterExec.condition() + .transformDown( + SpatialRelatesFunction.class, + spatialRelatesFunction -> (spatialRelatesFunction.hasFieldAttribute(foundAttributes)) + ? spatialRelatesFunction.withDocValues(foundAttributes) + : spatialRelatesFunction + ); + if (filterExec.condition().equals(condition) == false) { + exec = new FilterExec(filterExec.source(), filterExec.child(), condition); + } + } if (exec instanceof FieldExtractExec fieldExtractExec) { // Tell the field extractor that it should extract the field from doc-values instead of source values var attributesToExtract = fieldExtractExec.attributesToExtract(); @@ -501,5 +536,24 @@ protected PhysicalPlan rule(AggregateExec aggregate) { }); return plan; } + + /** + * This function disallows the use of more than one field for doc-values extraction in the same spatial relation function. + * This is because comparing two doc-values fields is not supported in the current implementation. 
+ */ + private boolean allowedForDocValues(FieldAttribute fieldAttribute, AggregateExec agg, Set foundAttributes) { + var candidateDocValuesAttributes = new HashSet<>(foundAttributes); + candidateDocValuesAttributes.add(fieldAttribute); + var spatialRelatesAttributes = new HashSet(); + agg.forEachExpressionDown(SpatialRelatesFunction.class, relatesFunction -> { + candidateDocValuesAttributes.forEach(candidate -> { + if (relatesFunction.hasFieldAttribute(Set.of(candidate))) { + spatialRelatesAttributes.add(candidate); + } + }); + }); + // Disallow more than one spatial field to be extracted using doc-values (for now) + return spatialRelatesAttributes.size() < 2; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 33f8b4a5eddef..352403f9f1724 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -7,9 +7,11 @@ package org.elasticsearch.xpack.esql.planner; +import org.apache.lucene.document.ShapeField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; @@ -19,7 +21,10 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; +import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -50,6 +55,7 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; +import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; @@ -61,6 +67,7 @@ public final class EsqlExpressionTranslators { public static final List> QUERY_TRANSLATORS = List.of( new EqualsIgnoreCaseTranslator(), new BinaryComparisons(), + new SpatialRelatesTranslator(), new ExpressionTranslators.Ranges(), new ExpressionTranslators.BinaryLogic(), new ExpressionTranslators.IsNulls(), @@ -348,4 +355,63 @@ public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { return ExpressionTranslators.Scalars.doTranslate(f, handler); } } + + public static class SpatialRelatesTranslator extends ExpressionTranslator { + + @Override + protected Query asQuery(SpatialRelatesFunction bc, TranslatorHandler handler) { + return doTranslate(bc, handler); + } + + public static void checkSpatialRelatesFunction(Expression constantExpression, ShapeField.QueryRelation queryRelation) { + Check.isTrue( + constantExpression.foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [ST_{}]", + constantExpression.sourceLocation().getLineNumber(), + constantExpression.sourceLocation().getColumnNumber(), + Expressions.name(constantExpression), + queryRelation + ); + } + + /** + * We should normally be using the real 
`wrapFunctionQuery` above, so we get the benefits of `SingleValueQuery`, + * but at the moment `SingleValueQuery` makes use of `SortDocValues` to determine if the results are single or multi-valued, + * and LeafShapeFieldData does not support `SortedBinaryDocValues getBytesValues()`. + * Skipping this code path entirely is a temporary workaround while separate work is being done to simplify `SingleValueQuery` + * to rather rely on a new method on `LeafFieldData`. This is both for the benefit of the spatial queries, as well as an + * improvement overall. + * TODO: Remove this method and call the parent method once the SingleValueQuery improvements have been made + */ + public static Query wrapFunctionQuery(Expression field, Supplier querySupplier) { + return ExpressionTranslator.wrapIfNested(querySupplier.get(), field); + } + + public static Query doTranslate(SpatialRelatesFunction bc, TranslatorHandler handler) { + if (bc.left().foldable()) { + checkSpatialRelatesFunction(bc.left(), bc.queryRelation()); + return wrapFunctionQuery(bc.right(), () -> translate(bc, handler, bc.right(), bc.left())); + } else { + checkSpatialRelatesFunction(bc.right(), bc.queryRelation()); + return wrapFunctionQuery(bc.left(), () -> translate(bc, handler, bc.left(), bc.right())); + } + } + + static Query translate( + SpatialRelatesFunction bc, + TranslatorHandler handler, + Expression spatialExpression, + Expression constantExpression + ) { + TypedAttribute attribute = checkIsPushableAttribute(spatialExpression); + String name = handler.nameOf(attribute); + + try { + Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(constantExpression); + return new SpatialRelatesQuery(bc.source(), name, bc.queryRelation(), shape, attribute.dataType()); + } catch (IllegalArgumentException e) { + throw new QlIllegalArgumentException(e.getMessage(), e); + } + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java new file mode 100644 index 0000000000000..ca69569546ba3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.querydsl.query; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.document.XYDocValuesField; +import org.apache.lucene.document.XYPointField; +import org.apache.lucene.document.XYShape; +import org.apache.lucene.geo.XYGeometry; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.geo.LuceneGeometriesUtils; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.ShapeType; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.GeoShapeQueryable; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.lucene.spatial.CartesianShapeDocValuesQuery; +import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.tree.Source; +import 
org.elasticsearch.xpack.ql.type.DataType; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; + +public class SpatialRelatesQuery extends Query { + private final String field; + private final ShapeField.QueryRelation queryRelation; + private final Geometry shape; + private final DataType dataType; + + public SpatialRelatesQuery(Source source, String field, ShapeField.QueryRelation queryRelation, Geometry shape, DataType dataType) { + super(source); + this.field = field; + this.queryRelation = queryRelation; + this.shape = shape; + this.dataType = dataType; + } + + @Override + public boolean containsNestedField(String path, String field) { + return false; + } + + @Override + public Query addNestedField(String path, String field, String format, boolean hasDocValues) { + return null; + } + + @Override + public void enrichNestedSort(NestedSortBuilder sort) { + + } + + @Override + public QueryBuilder asBuilder() { + return EsqlDataTypes.isSpatialGeo(dataType) ? 
new GeoShapeQueryBuilder() : new CartesianShapeQueryBuilder(); + } + + @Override + protected String innerToString() { + throw new IllegalArgumentException("SpatialRelatesQuery.innerToString() not implemented"); + } + + @Override + public int hashCode() { + return Objects.hash(field, queryRelation, shape, dataType); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + SpatialRelatesQuery other = (SpatialRelatesQuery) obj; + return Objects.equals(field, other.field) + && Objects.equals(queryRelation, other.queryRelation) + && Objects.equals(shape, other.shape) + && Objects.equals(dataType, other.dataType); + } + + public ShapeRelation shapeRelation() { + return switch (queryRelation) { + case INTERSECTS -> ShapeRelation.INTERSECTS; + case DISJOINT -> ShapeRelation.DISJOINT; + case WITHIN -> ShapeRelation.WITHIN; + case CONTAINS -> ShapeRelation.CONTAINS; + }; + } + + /** + * This class is a minimal implementation of the QueryBuilder interface. + * We only need the toQuery method, but ESQL makes extensive use of QueryBuilder and trimming that interface down for ESQL only would + * be a large undertaking. + * Note that this class is only public for testing in PhysicalPlanOptimizerTests. 
+ */ + public abstract class ShapeQueryBuilder implements QueryBuilder { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException("Unimplemented: toXContent()"); + } + + @Override + public org.apache.lucene.search.Query toQuery(SearchExecutionContext context) throws IOException { + final MappedFieldType fieldType = context.getFieldType(field); + if (fieldType == null) { + throw new QueryShardException(context, "failed to find type for field [" + field + "]"); + } + return buildShapeQuery(context, fieldType); + } + + abstract org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType); + + @Override + public QueryBuilder queryName(String queryName) { + throw new UnsupportedOperationException("Unimplemented: String"); + } + + @Override + public String queryName() { + throw new UnsupportedOperationException("Unimplemented: queryName"); + } + + @Override + public float boost() { + return 0; + } + + @Override + public QueryBuilder boost(float boost) { + throw new UnsupportedOperationException("Unimplemented: float"); + } + + @Override + public String getName() { + throw new UnsupportedOperationException("Unimplemented: getName"); + } + + /** Public for testing */ + public String fieldName() { + return field; + } + + /** Public for testing */ + public ShapeRelation relation() { + return shapeRelation(); + } + + /** Public for testing */ + public Geometry shape() { + return shape; + } + } + + private class GeoShapeQueryBuilder extends ShapeQueryBuilder { + public final String NAME = "geo_shape"; + + @Override + public String 
getWriteableName() { + return "GeoShapeQueryBuilder"; + } + + @Override + org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType) { + if ((fieldType instanceof GeoShapeQueryable) == false) { + throw new QueryShardException( + context, + "Field [" + field + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query" + ); + } + final GeoShapeQueryable ft = (GeoShapeQueryable) fieldType; + return new ConstantScoreQuery(ft.geoShapeQuery(context, fieldType.name(), shapeRelation(), shape)); + } + } + + private class CartesianShapeQueryBuilder extends ShapeQueryBuilder { + @Override + public String getWriteableName() { + return "CartesianShapeQueryBuilder"; + } + + @Override + org.apache.lucene.search.Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType) { + org.apache.lucene.search.Query innerQuery = dataType == CARTESIAN_POINT + ? pointShapeQuery(shape, fieldType.name(), queryRelation, context) + : shapeShapeQuery(shape, fieldType.name(), queryRelation, context); + return new ConstantScoreQuery(innerQuery); + } + + /** + * This code is based on the ShapeQueryPointProcessor.shapeQuery() method + */ + private static org.apache.lucene.search.Query pointShapeQuery( + Geometry geometry, + String fieldName, + ShapeField.QueryRelation relation, + SearchExecutionContext context + ) { + final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); + // only the intersects relation is supported for indexed cartesian point types + if (relation != ShapeField.QueryRelation.INTERSECTS) { + throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + } + final Consumer checker = t -> { + if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { + throw new QueryShardException(context, "Field [" + fieldName + "] does not support " + t + " queries"); + } + 
}; + final XYGeometry[] luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, checker); + org.apache.lucene.search.Query query = XYPointField.newGeometryQuery(fieldName, luceneGeometries); + if (hasDocValues) { + final org.apache.lucene.search.Query queryDocValues = XYDocValuesField.newSlowGeometryQuery(fieldName, luceneGeometries); + query = new IndexOrDocValuesQuery(query, queryDocValues); + } + return query; + } + + /** + * This code is based on the ShapeQueryProcessor.shapeQuery() method + */ + private static org.apache.lucene.search.Query shapeShapeQuery( + Geometry geometry, + String fieldName, + ShapeField.QueryRelation relation, + SearchExecutionContext context + ) { + final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); + // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); + if (relation == ShapeField.QueryRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { + throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + } + if (geometry == null || geometry.isEmpty()) { + return new MatchNoDocsQuery(); + } + final XYGeometry[] luceneGeometries; + try { + luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + } catch (IllegalArgumentException e) { + throw new QueryShardException(context, "Exception creating query on Field [" + fieldName + "] " + e.getMessage(), e); + } + org.apache.lucene.search.Query query = XYShape.newGeometryQuery(fieldName, relation, luceneGeometries); + if (hasDocValues) { + final org.apache.lucene.search.Query queryDocValues = new CartesianShapeDocValuesQuery( + fieldName, + relation, + luceneGeometries + ); + query = new IndexOrDocValuesQuery(query, queryDocValues); + } + return query; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 0813069330879..8edee89832255 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -45,10 +45,10 @@ public final class EsqlDataTypes { public static final DataType DATE_PERIOD = new DataType("DATE_PERIOD", null, 3 * Integer.BYTES, false, false, false); public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); - public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false); - public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, false); - public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, false); - public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, false); + public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, true); + public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, true); + public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, true); + public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, true); private static final Collection TYPES = Stream.of( BOOLEAN, @@ -175,6 +175,10 @@ public static boolean isSpatial(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; } + public static boolean isSpatialGeo(DataType t) { + return t == GEO_POINT || t == GEO_SHAPE; + } + public static boolean isSpatialPoint(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT; } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 38389369fe6b3..84f1b31fc8705 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -214,6 +214,16 @@ private static void casesCrossProduct( } } + public static TestCaseSupplier testCaseSupplier( + TypedDataSupplier lhsSupplier, + TypedDataSupplier rhsSupplier, + BiFunction evaluatorToString, + DataType expectedType, + BinaryOperator expectedValue + ) { + return testCaseSupplier(lhsSupplier, rhsSupplier, evaluatorToString, expectedType, expectedValue, List.of()); + } + private static TestCaseSupplier testCaseSupplier( TypedDataSupplier lhsSupplier, TypedDataSupplier rhsSupplier, @@ -938,31 +948,53 @@ public static List timeDurationCases() { ); } - private static List geoPointCases() { - return List.of(new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint()), EsqlDataTypes.GEO_POINT)); + public static List geoPointCases() { + return geoPointCases(ESTestCase::randomBoolean); } - private static List cartesianPointCases() { + public static List cartesianPointCases() { + return cartesianPointCases(ESTestCase::randomBoolean); + } + + public static List geoShapeCases() { + return geoShapeCases(ESTestCase::randomBoolean); + } + + public static List cartesianShapeCases() { + return cartesianShapeCases(ESTestCase::randomBoolean); + } + + public static List geoPointCases(Supplier hasAlt) { return List.of( - new TypedDataSupplier("", () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()), EsqlDataTypes.CARTESIAN_POINT) + new TypedDataSupplier("", () -> GEO.asWkb(GeometryTestUtils.randomPoint(hasAlt.get())), EsqlDataTypes.GEO_POINT) + ); + } + + public static List 
cartesianPointCases(Supplier hasAlt) { + return List.of( + new TypedDataSupplier( + "", + () -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint(hasAlt.get())), + EsqlDataTypes.CARTESIAN_POINT + ) ); } - private static List geoShapeCases() { + public static List geoShapeCases(Supplier hasAlt) { return List.of( new TypedDataSupplier( "", - () -> GEO.asWkb(GeometryTestUtils.randomGeometry(ESTestCase.randomBoolean())), + () -> GEO.asWkb(GeometryTestUtils.randomGeometryWithoutCircle(0, hasAlt.get())), EsqlDataTypes.GEO_SHAPE ) ); } - private static List cartesianShapeCases() { + public static List cartesianShapeCases(Supplier hasAlt) { return List.of( new TypedDataSupplier( "", - () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(ESTestCase.randomBoolean())), + () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(hasAlt.get())), EsqlDataTypes.CARTESIAN_SHAPE ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java new file mode 100644 index 0000000000000..e36d92fecd81f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import joptsimple.internal.Strings; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; + +@FunctionName("st_intersects") + +public class SpatialIntersectsTests extends AbstractFunctionTestCase { + public SpatialIntersectsTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, 
EsqlDataTypes.CARTESIAN_SHAPE }; + addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialIntersects(source, args.get(0), args.get(1)); + } + + private static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { + for (DataType leftType : dataTypes) { + TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); + for (DataType rightType : dataTypes) { + if (typeCompatible(leftType, rightType)) { + TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); + suppliers.add( + TestCaseSupplier.testCaseSupplier( + leftDataSupplier, + rightDataSupplier, + SpatialIntersectsTests::spatialEvaluatorString, + DataTypes.BOOLEAN, + (l, r) -> expected(l, leftType, r, rightType) + ) + ); + } + } + } + } + + /** + * Build the expected error message for an invalid type signature. + */ + protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { + List badArgPositions = new ArrayList<>(); + for (int i = 0; i < types.size(); i++) { + if (validPerPosition.get(i).contains(types.get(i)) == false) { + badArgPositions.add(i); + } + } + if (badArgPositions.size() == 0) { + return oneInvalid(1, 0, includeOrdinal, types); + } else if (badArgPositions.size() == 1) { + int badArgPosition = badArgPositions.get(0); + int goodArgPosition = badArgPosition == 0 ? 
1 : 0; + if (isSpatial(types.get(goodArgPosition)) == false) { + return oneInvalid(badArgPosition, -1, includeOrdinal, types); + } else { + return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); + } + } else { + return oneInvalid(0, -1, includeOrdinal, types); + } + } + + private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { + String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; + String expectedType = goodArgPosition >= 0 + ? compatibleTypes(types.get(goodArgPosition)) + : "geo_point, cartesian_point, geo_shape or cartesian_shape"; + String name = types.get(badArgPosition).typeName(); + return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; + } + + private static String compatibleTypes(DataType spatialDataType) { + return Strings.join(compatibleTypeNames(spatialDataType), " or "); + } + + private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { + return switch (dataType.esType()) { + case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); + case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); + case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); + case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); + default -> throw new IllegalArgumentException("Unsupported datatype for ST_INTERSECTS: " + dataType); + }; + } + + private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { + if (typeCompatible(leftType, rightType) == false) { + return null; + } + // TODO cast objects to right type and check intersection + BytesRef leftWKB = asGeometryWKB(left, leftType); + BytesRef rightWKB = asGeometryWKB(right, rightType); + SpatialRelatesFunction.SpatialRelations spatialIntersects = 
spatialRelations(left, leftType, right, rightType); + try { + return spatialIntersects.geometryRelatesGeometry(leftWKB, rightWKB); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static SpatialRelatesFunction.SpatialRelations spatialRelations( + Object left, + DataType leftType, + Object right, + DataType rightType + ) { + if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { + return SpatialIntersects.GEO; + } else if (isSpatial(leftType) || isSpatial(rightType)) { + return SpatialIntersects.CARTESIAN; + } else { + throw new IllegalArgumentException( + "Unsupported left and right types: left[" + + leftType.esType() + + ":" + + left.getClass().getSimpleName() + + "] right[" + + rightType.esType() + + ":" + + right.getClass().getSimpleName() + + "]" + ); + } + } + + private static BytesRef asGeometryWKB(Object object, DataType dataType) { + if (isString(dataType)) { + return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); + } else if (object instanceof BytesRef wkb) { + return wkb; + } else { + throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); + } + } + + private static boolean typeCompatible(DataType leftType, DataType rightType) { + if (isSpatial(leftType) && isSpatial(rightType)) { + // Both must be GEO_* or both must be CARTESIAN_* + return countGeo(leftType, rightType) != 1; + } + return true; + } + + private static DataType pickSpatialType(DataType leftType, DataType rightType) { + if (isSpatial(leftType)) { + return leftType; + } else if (isSpatial(rightType)) { + return rightType; + } else { + throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); + } + } + + private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? 
"Geo" : "Cartesian"; + return "SpatialIntersects" + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + } + + private static int countGeo(DataType... types) { + int count = 0; + for (DataType type : types) { + if (isSpatialGeo(type)) { + count++; + } + } + return count; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 80b9c07095ee9..650cd2c81115c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -9,11 +9,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -40,6 +42,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -68,6 +71,7 @@ import 
org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; +import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -82,6 +86,7 @@ import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.expression.function.aggregate.SpatialAggregateFunction; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -95,6 +100,7 @@ import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; +import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -103,7 +109,6 @@ import static java.util.Arrays.asList; import static org.elasticsearch.core.Tuple.tuple; -import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.DOC_VALUES; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -142,14 +147,17 @@ public class PhysicalPlanOptimizerTests extends ESTestCase { private LogicalPlanOptimizer logicalOptimizer; private PhysicalPlanOptimizer physicalPlanOptimizer; private Mapper mapper; - private Map mapping; - private Analyzer analyzer; - private int allFieldRowSize; - private static Map mappingAirports; - private static Analyzer analyzerAirports; + private TestDataSource testData; + private int 
allFieldRowSize; // TODO: Move this into testDataSource so tests that load other indexes can also assert on this + private TestDataSource airports; + private TestDataSource airportsWeb; + private TestDataSource countriesBbox; + private TestDataSource countriesBboxWeb; private final EsqlConfiguration config; + private record TestDataSource(Map mapping, EsIndex index, Analyzer analyzer) {} + @ParametersFactory(argumentFormatting = PARAM_FORMATTING) public static List readScriptSpec() { return settings().stream().map(t -> { @@ -173,13 +181,10 @@ public void init() { physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); - // Most tests used data from the test index, so we load it here, and use it in the plan() function. - mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping, Set.of("test")); - IndexResolution getIndexResult = IndexResolution.valid(test); var enrichResolution = setupEnrichResolution(); - analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); - allFieldRowSize = mapping.values() + // Most tests used data from the test index, so we load it here, and use it in the plan() function. + this.testData = makeTestDataSource("test", "mapping-basic.json", functionRegistry, enrichResolution); + allFieldRowSize = testData.mapping.values() .stream() .mapToInt( f -> (EstimatesRowSize.estimateSize(EsqlDataTypes.widenSmallNumericTypes(f.getDataType())) + f.getProperties() @@ -191,16 +196,31 @@ public void init() { ) .sum(); - // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. 
- mappingAirports = loadMapping("mapping-airports.json"); - EsIndex airports = new EsIndex("airports", mappingAirports, Set.of("airports")); - IndexResolution getIndexResultAirports = IndexResolution.valid(airports); - analyzerAirports = new Analyzer( - new AnalyzerContext(config, functionRegistry, getIndexResultAirports, enrichResolution), - TEST_VERIFIER + // Some tests use data from the airports and countries indexes, so we load that here, and use it in the plan(q, airports) function. + this.airports = makeTestDataSource("airports", "mapping-airports.json", functionRegistry, enrichResolution); + this.airportsWeb = makeTestDataSource("airports_web", "mapping-airports_web.json", functionRegistry, enrichResolution); + this.countriesBbox = makeTestDataSource("countriesBbox", "mapping-countries_bbox.json", functionRegistry, enrichResolution); + this.countriesBboxWeb = makeTestDataSource( + "countriesBboxWeb", + "mapping-countries_bbox_web.json", + functionRegistry, + enrichResolution ); } + TestDataSource makeTestDataSource( + String indexName, + String mappingFileName, + FunctionRegistry functionRegistry, + EnrichResolution enrichResolution + ) { + Map mapping = loadMapping(mappingFileName); + EsIndex index = new EsIndex(indexName, mapping, Set.of("test")); + IndexResolution getIndexResult = IndexResolution.valid(index); + Analyzer analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); + return new TestDataSource(mapping, index, analyzer); + } + private static EnrichResolution setupEnrichResolution() { EnrichResolution enrichResolution = new EnrichResolution(); enrichResolution.addResolvedPolicy( @@ -319,7 +339,10 @@ public void testSingleFieldExtractor() { var filter = as(limit.child(), FilterExec.class); var extract = as(filter.child(), FieldExtractExec.class); - assertEquals(Sets.difference(allFields(mapping), Set.of("emp_no")), Sets.newHashSet(names(restExtract.attributesToExtract()))); + 
assertEquals( + Sets.difference(allFields(testData.mapping), Set.of("emp_no")), + Sets.newHashSet(names(restExtract.attributesToExtract())) + ); assertEquals(Set.of("emp_no"), Sets.newHashSet(names(extract.attributesToExtract()))); var query = as(extract.child(), EsQueryExec.class); @@ -355,7 +378,10 @@ public void testExactlyOneExtractorPerFieldWithPruning() { var filter = as(limit.child(), FilterExec.class); var extract = as(filter.child(), FieldExtractExec.class); - assertEquals(Sets.difference(allFields(mapping), Set.of("emp_no")), Sets.newHashSet(names(restExtract.attributesToExtract()))); + assertEquals( + Sets.difference(allFields(testData.mapping), Set.of("emp_no")), + Sets.newHashSet(names(restExtract.attributesToExtract())) + ); assertThat(names(extract.attributesToExtract()), contains("emp_no")); var query = source(extract.child()); @@ -2253,10 +2279,10 @@ public void testPartialAggFoldingOutputForSyntheticAgg() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValues() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(location) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2278,13 +2304,12 @@ public void testSpatialTypesAndStatsUseDocValues() { agg = as(exchange.child(), AggregateExec.class); // below the exchange (in data node) the aggregation is using doc-values assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); - var filter = as(agg.child(), FilterExec.class); - var extract = as(filter.child(), FieldExtractExec.class); + var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference 
extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2311,10 +2336,10 @@ public void testSpatialTypesAndStatsUseDocValues() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesNested() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(to_geopoint(location)) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2351,6 +2376,8 @@ public void testSpatialTypesAndStatsUseDocValuesNested() { } /** + * This test does not have real index fields, and therefore asserts that doc-values field extraction does NOT occur.
+ * * Before local optimizations: * * LimitExec[1000[INTEGER]] @@ -2370,10 +2397,10 @@ public void testSpatialTypesAndStatsUseDocValuesNested() { * 36 35 33 36 29][KEYWORD] AS wkt]] */ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" row wkt = "POINT(42.97109629958868 14.7552534006536)" | stats centroid = st_centroid(to_geopoint(wkt)) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2424,10 +2451,10 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" from airports | stats centroid = st_centroid(location), count = COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2456,10 +2483,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2490,10 +2517,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregations() { * Note the FieldExtractExec 
has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2525,10 +2552,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { assertAggregation(agg, "cities", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); } /** @@ -2555,11 +2582,11 @@ public void testSpatialTypesAndStatsUseDocValuesMultiSpatialAggregations() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2589,10 +2616,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { assertAggregation(agg, "count", Count.class); assertAggregation(agg, 
"centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); var source = source(extract.child()); var qb = as(source.query(), SingleValueQuery.Builder.class); assertThat("Expected predicate to be passed to Lucene query", qb.source().text(), equalTo("scalerank == 9")); @@ -2623,10 +2650,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsFiltered() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2660,10 +2687,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { assertAggregation(agg, "count", Count.class); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == 
DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); source(extract.child()); } @@ -2693,11 +2720,11 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGrouped() { * Note the FieldExtractExec has 'location' set for stats: FieldExtractExec[location{f}#9][location{f}#9] */ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregated() { - var plan = physicalPlanAirports(""" + var plan = this.physicalPlan(""" FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2753,10 +2780,10 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregat assertAggregation(agg, "count", Count.class); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); var extract = as(agg.child(), FieldExtractExec.class); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); source(extract.child()); } @@ -2779,18 +2806,18 @@ public void testSpatialTypesAndStatsUseDocValuesMultiAggregationsGroupedAggregat * \_AggregateExec[[],[SPATIALCENTROID(city_location{f}#16) AS centroid],PARTIAL,50] * 
\_EnrichExec[ANY,geo_match,city_location{f}#16,city_boundaries,city_boundary,{=airport_city_boundaries},[airport{r}#21, * region{r}#22, city_boundary{r}#23]] - * \_FilterExec[ISNOTNULL(city_location{f}#16)] - * \_FieldExtractExec[city_location{f}#16][city_location{f}#16]> - * \_EsQueryExec[airports], query[][_doc{f}#46], limit[], sort[] estimatedRowSize[204] + * \_FieldExtractExec[city_location{f}#16][city_location{f}#16] + * \_EsQueryExec[airports], query[{"exists":{"field":"city_location","boost":1.0}}][_doc{f}#46], limit[], sort[] + * estimatedRowSize[204] * * Note the FieldExtractExec has 'city_location' set for doc-values: FieldExtractExec[city_location{f}#16][city_location{f}#16] */ public void testEnrichBeforeSpatialAggregationSupportsDocValues() { - var plan = physicalPlanAirports(""" + var plan = physicalPlan(""" from airports | enrich city_boundaries ON city_location WITH airport, region, city_boundary | stats centroid = st_centroid(city_location) - """); + """, airports); var limit = as(plan, LimitExec.class); var agg = as(limit.child(), AggregateExec.class); @@ -2820,13 +2847,578 @@ public void testEnrichBeforeSpatialAggregationSupportsDocValues() { assertThat(enrichExec.mode(), equalTo(Enrich.Mode.ANY)); assertThat(enrichExec.concreteIndices(), equalTo(Map.of("", "airport_city_boundaries"))); assertThat(enrichExec.enrichFields().size(), equalTo(3)); - var filter = as(enrichExec.child(), FilterExec.class); - var extract = as(filter.child(), FieldExtractExec.class); + var extract = as(enrichExec.child(), FieldExtractExec.class); + source(extract.child()); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + } + + /** + * Plan: + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Limit[500[INTEGER]] + * 
\_Filter[SPATIALINTERSECTS(location{f}#7,[50 4f 4c 59 47 4f 4e 28 29][KEYWORD])] + * \_EsRelation[airports][abbrev{f}#3, city{f}#9, city_location{f}#10, countr..]]] + * + * Optimized: + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[abbrev{f}#3, city{f}#9, city_location{f}#10, country{f}#8, location{f}#7, name{f}#4, scalerank{f}#5, type{f}# + * 6]] + * \_FieldExtractExec[abbrev{f}#3, city{f}#9, city_location{f}#10, countr..][] + * \_EsQueryExec[airports], query[{ + * "esql_single_value":{ + * "field":"location", + * "next":{ + * "geo_shape":{ + * "location":{ + * "shape":{ + * "type":"Polygon", + * "coordinates":[[[42.0,14.0],[43.0,14.0],[43.0,15.0],[42.0,15.0],[42.0,14.0]]] + * }, + * "relation":"intersects" + * }, + * "ignore_unmapped":false, + * "boost":1.0 + * } + * }, + * "source":"ST_INTERSECTS(location, \"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")@2:9" + * } + * }][_doc{f}#19], limit[500], sort[] estimatedRowSize[358] + */ + public void testPushSpatialIntersectsStringToSource() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + """, """ + FROM airports + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); 
+ var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + /** + * Plan: + * Plan: + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#16, xDel{r}#17, yVal{r}#18, yDel{r}#19, count{r}#20, count{r}#21, seen{r}#22],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count]] + * \_Filter[SPATIALINTERSECTS(location{f}#12,[50 4f 4c 59 47 4f 4e 28 28 34 32 20 31 34 2c 20 34 33 20 31 34 2c 20 34 33 2 + * 0 31 35 2c 20 34 32 20 31 35 2c 20 34 32 20 31 34 29 29][KEYWORD])] + * \_EsRelation[airports][abbrev{f}#8, city{f}#14, city_location{f}#15, count..]]] + * + * Optimized: + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],FINAL,58] + * \_ExchangeExec[[xVal{r}#16, xDel{r}#17, yVal{r}#18, yDel{r}#19, count{r}#20, count{r}#21, seen{r}#22],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#12) AS centroid, COUNT([2a][KEYWORD]) AS count],PARTIAL,58] + * \_FieldExtractExec[location{f}#12][location{f}#12] + * \_EsQueryExec[airports], query[{ + * "esql_single_value":{ + * 
"field":"location", + * "next":{ + * "geo_shape":{ + * "location":{ + * "shape":{ + * "type":"Polygon", + * "coordinates":[[[42.0,14.0],[43.0,14.0],[43.0,15.0],[42.0,15.0],[42.0,14.0]]] + * }, + * "relation":"intersects" + * }, + * "ignore_unmapped":false, + * "boost":1.0 + * } + * }, + * "source":"ST_INTERSECTS(location, \"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")@2:9" + * } + * }][_doc{f}#140, limit[], sort[] estimatedRowSize[54] + */ + public void testPushSpatialIntersectsStringToSourceAndUseDocValuesForCentroid() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + | STATS centroid=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + | STATS centroid=ST_CENTROID(location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + 
assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsStringToSourceCompoundPredicate() { + for (String query : new String[] { """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND type == "mid" + """, """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + AND type == "mid" + """ }) { + + var plan = 
this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + var and = as(filter.condition(), And.class); + var left = as(and.left(), And.class); + assertThat("filter contains ST_INTERSECTS", left.right(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of three predicates", booleanQuery.must().size(), equalTo(3)); + var condition = as(booleanQuery.must().get(1), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsStringToSourceCompoundPredicateAndUseDocValuesForCentroid() { + for (String query : new String[] { """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND type == "mid" 
+ | STATS centroid=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE scalerank == 9 + AND ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) + AND type == "mid" + | STATS centroid=ST_CENTROID(location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + var and = as(filter.condition(), And.class); + var left = as(and.left(), And.class); + assertThat("filter contains ST_INTERSECTS", left.right(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( 
+ "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract().stream().allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == GEO_POINT) + ); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of three predicates", booleanQuery.must().size(), equalTo(3)); + var condition = as(booleanQuery.must().get(1), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + /** + * Plan: + * LimitExec[1000[INTEGER]] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],FINAL,null] + * \_ExchangeExec[[xVal{r}#20, xDel{r}#21, yVal{r}#22, yDel{r}#23, count{r}#24, xVal{r}#25, xDel{r}#26, yVal{r}#27, yDel{r}#28, + * count{r}#29, count{r}#30, seen{r}#31],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count]] + * \_Filter[SPATIALINTERSECTS(location{f}#16,city_location{f}#19)] + * \_EsRelation[airports][abbrev{f}#12, city{f}#18, city_location{f}#19, coun..]]] + * + * Optimized: + * LimitExec[1000[INTEGER]] + * 
\_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],FINAL,108] + * \_ExchangeExec[[xVal{r}#20, xDel{r}#21, yVal{r}#22, yDel{r}#23, count{r}#24, xVal{r}#25, xDel{r}#26, yVal{r}#27, yDel{r}#28, + * count{r}#29, count{r}#30, seen{r}#31],true] + * \_AggregateExec[[],[SPATIALCENTROID(location{f}#16) AS location, SPATIALCENTROID(city_location{f}#19) AS city_location, COUNT([ + * 2a][KEYWORD]) AS count],PARTIAL,108] + * \_FilterExec[SPATIALINTERSECTS(location{f}#16,city_location{f}#19)] + * \_FieldExtractExec[location{f}#16, city_location{f}#19][city_location{f}#19, location{f}#16] + * \_EsQueryExec[airports], query[][_doc{f}#55], limit[], sort[] estimatedRowSize[104] + */ + public void testIntersectsOnTwoPointFieldAndBothCentroidUsesDocValues() { + String query = """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + """; + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = 
optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, true); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + var filterExec = as(agg.child(), FilterExec.class); + var extract = as(filterExec.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, "location"); source(extract.child()); - assertTrue("Expect attributes field extract preference to be DOC_VALUES", extract.attributesToExtract().stream().allMatch(attr -> { - MappedFieldType.FieldExtractPreference extractPreference = PlannerUtils.extractPreference(extract.hasDocValuesAttribute(attr)); - return extractPreference == DOC_VALUES && attr.dataType() == GEO_POINT; - })); + } + + public void testIntersectsOnTwoPointFieldAndOneCentroidUsesDocValues() { + for (String query : new String[] { """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS location=ST_CENTROID(location), count=COUNT() + """, """ + FROM airports + | WHERE ST_INTERSECTS(location, city_location) + | STATS city_location=ST_CENTROID(city_location), count=COUNT() + """ }) { + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in 
aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + var aggFieldName = findSingleAggregation(agg, "location", "city_location"); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, aggFieldName, SpatialCentroid.class, GEO_POINT, true); + var filterExec = as(agg.child(), FilterExec.class); + var extract = as(filterExec.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, aggFieldName); + source(extract.child()); + } + } + + public void testTwoIntersectsWithTwoCentroidsUsesDocValues() { + String query = """ + FROM airports + | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + AND ST_INTERSECTS(city_location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + | 
STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() + """; + + var plan = this.physicalPlan(query, airports); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + var and = as(filter.condition(), And.class); + assertThat("filter contains ST_INTERSECTS", and.left(), instanceOf(SpatialIntersects.class)); + assertThat("filter contains ST_INTERSECTS", and.right(), instanceOf(SpatialIntersects.class)); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, false); + assertAggregation(agg, "city_location", SpatialCentroid.class, GEO_POINT, false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "location", SpatialCentroid.class, GEO_POINT, true); + assertAggregation(agg, 
"city_location", SpatialCentroid.class, GEO_POINT, true); + var extract = as(agg.child(), FieldExtractExec.class); + assertFieldExtractionWithDocValues(extract, GEO_POINT, "location", "city_location"); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var booleanQuery = as(source.query(), BoolQueryBuilder.class); + assertThat("Expected boolean query of two predicates", booleanQuery.must().size(), equalTo(2)); + String[] fieldNames = new String[] { "location", "city_location" }; + for (String fieldName : fieldNames) { + var condition = as(findQueryBuilder(booleanQuery, fieldName), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo(fieldName)); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushSpatialIntersectsShapeToSource() { + for (String query : new String[] { """ + FROM countriesBbox + | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) + """, """ + FROM countriesBbox + | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), shape) + """ }) { + + var plan = this.physicalPlan(query, countriesBbox); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", 
filter.condition(), instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("shape")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushCartesianSpatialIntersectsToSource() { + for (String query : new String[] { """ + FROM airports_web + | WHERE ST_INTERSECTS( + location, + TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))") + ) + """, """ + FROM airports_web + | WHERE ST_INTERSECTS( + TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), + location + ) + """ }) { + + var plan = this.physicalPlan(query, airportsWeb); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), 
instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } + } + + public void testPushCartesianSpatialIntersectsShapeToSource() { + for (String query : new String[] { """ + FROM countriesBboxWeb + | WHERE ST_INTERSECTS( + shape, + TO_CARTESIANSHAPE( + "POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))" + ) + ) + """, """ + FROM countriesBboxWeb + | WHERE ST_INTERSECTS( + TO_CARTESIANSHAPE( + "POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))" + ), + shape + ) + """ }) { + + var plan = this.physicalPlan(query, countriesBboxWeb); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat("filter contains ST_INTERSECTS", filter.condition(), 
instanceOf(SpatialIntersects.class)); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + var source = source(fieldExtract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name", condition.fieldName(), equalTo("shape")); + assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); + assertThat("Geometry is Polygon", condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length", polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes", polygon.getNumberOfHoles(), equalTo(0)); + } } public void testEnrichBeforeAggregation() { @@ -3287,6 +3879,42 @@ private static AggregateFunction assertAggregation(PhysicalPlan plan, String ali return aggFunc; } + private static String findSingleAggregation(PhysicalPlan plan, String... 
aliasNames) { + var agg = as(plan, AggregateExec.class); + var aggExps = agg.aggregates().stream().filter(a -> { + var alias = as(a, Alias.class); + return Arrays.stream(aliasNames).anyMatch(name -> name.equals(alias.name())); + }).toList(); + if (aggExps.size() != 1) { + throw new AssertionError( + "Expected single aggregation from " + Arrays.toString(aliasNames) + " but found " + aggExps.size() + " aggregations" + ); + } + var aggExp = aggExps.get(0); + var alias = as(aggExp, Alias.class); + return alias.name(); + } + + private static QueryBuilder findQueryBuilder(BoolQueryBuilder booleanQuery, String fieldName) { + return booleanQuery.must() + .stream() + .filter(b -> ((SpatialRelatesQuery.ShapeQueryBuilder) b).fieldName().equals(fieldName)) + .findFirst() + .get(); + } + + private void assertFieldExtractionWithDocValues(FieldExtractExec extract, DataType dataType, String... fieldNames) { + extract.attributesToExtract().forEach(attr -> { + String name = attr.qualifiedName(); + if (asList(fieldNames).contains(name)) { + assertThat("Expected field '" + name + "' to use doc-values", extract.hasDocValuesAttribute(attr), equalTo(true)); + assertThat("Expected field '" + name + "' to have data type " + dataType, attr.dataType(), equalTo(dataType)); + } else { + assertThat("Expected field '" + name + "' to NOT use doc-values", extract.hasDocValuesAttribute(attr), equalTo(false)); + } + }); + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -3344,16 +3972,11 @@ static PhysicalPlan localRelationshipAlignment(PhysicalPlan l) { } private PhysicalPlan physicalPlan(String query) { - var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); - // System.out.println("Logical\n" + logical); - var physical = mapper.map(logical); - // System.out.println(physical); - assertSerialization(physical); - return physical; + return physicalPlan(query, testData); } - private 
PhysicalPlan physicalPlanAirports(String query) { - var logical = logicalOptimizer.optimize(analyzerAirports.analyze(parser.createStatement(query))); + private PhysicalPlan physicalPlan(String query, TestDataSource dataSource) { + var logical = logicalOptimizer.optimize(dataSource.analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); // System.out.println(physical); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index 32bd76cf84e19..05e4d59843a10 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -108,7 +108,19 @@ public BytesRef wktToWkb(String wkt) { } } + public Geometry wktToGeometry(String wkt) { + try { + return WellKnownText.fromWKT(GeometryValidator.NOOP, false, wkt); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); + } + } + public String wkbToWkt(BytesRef wkb) { return WellKnownText.fromWKB(wkb.bytes, wkb.offset, wkb.length); } + + public Geometry wkbToGeometry(BytesRef wkb) { + return WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length); + } } From 6731538bbe18a56f827a73bf14b221454d75afc2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 10:09:18 -0700 Subject: [PATCH 025/214] Different string allocation on jdk 21/22 (#106492) Similar to https://github.com/elastic/elasticsearch/pull/106360, the methods for allocating a native string changed between Java 21 and 22. This commit adds another util method to handle the differences and uses it in the jdk systemd impl. 
--- .../elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java | 2 +- .../elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java | 5 +++++ .../elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 745b93ac918dd..5313984ac6d61 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -58,7 +58,7 @@ static String findLibSystemd() { @Override public int sd_notify(int unset_environment, String state) { try (Arena arena = Arena.ofConfined()) { - MemorySegment nativeState = arena.allocateUtf8String(state); + MemorySegment nativeState = MemorySegmentUtil.allocateString(arena, state); return (int) sd_notify$mh.invokeExact(unset_environment, nativeState); } catch (Throwable t) { throw new AssertionError(t); diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java index 53e4c06bf0435..6ac0243c3befe 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java @@ -8,6 +8,7 @@ package org.elasticsearch.nativeaccess.jdk; +import java.lang.foreign.Arena; import java.lang.foreign.MemorySegment; /** @@ -19,5 +20,9 @@ static String getString(MemorySegment segment, long offset) { return segment.getUtf8String(offset); } + static MemorySegment allocateString(Arena arena, String s) { + return arena.allocateUtf8String(s); + } + private MemorySegmentUtil() {} } diff --git 
a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java index c155647a3ccd4..59bb57d174009 100644 --- a/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java +++ b/libs/native/src/main22/java/org/elasticsearch/nativeaccess/jdk/MemorySegmentUtil.java @@ -8,6 +8,7 @@ package org.elasticsearch.nativeaccess.jdk; +import java.lang.foreign.Arena; import java.lang.foreign.MemorySegment; public class MemorySegmentUtil { @@ -16,5 +17,9 @@ static String getString(MemorySegment segment, long offset) { return segment.getString(offset); } + static MemorySegment allocateString(Arena arena, String s) { + return arena.allocateFrom(s); + } + private MemorySegmentUtil() {} } From 085e02eb7ff524abad919efaf923e5bd43483a08 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Mar 2024 13:09:57 -0400 Subject: [PATCH 026/214] ESQL: Explain an exception message (#106488) It'll make tracking errors easier. 
--- .../elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 07f480ce9950e..a2aa77a1661d9 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -238,7 +238,11 @@ public static void assertRequestBreakerEmpty() throws Exception { Map node = (Map) n; Map breakers = (Map) node.get("breakers"); Map request = (Map) breakers.get("request"); - assertMap(request, matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b")); + assertMap( + "circuit breakers not reset to 0", + request, + matchesMap().extraOk().entry("estimated_size_in_bytes", 0).entry("estimated_size", "0b") + ); } }); } From 022dc4d64edcfad3aad8582f71fbfebbdfdc1830 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 19 Mar 2024 18:25:23 +0100 Subject: [PATCH 027/214] Refactor (simplify) LocalHealthMonitor (#106324) This refactors the local health monitoring process by removing the HealthProgress record and the nextRunScheduled flag. This should simplify the process and thus (hopefully) make it easier to understand (and debug if needed). 
--- .../health/node/LocalHealthMonitor.java | 72 +++++++++--------- .../node/tracker/DiskHealthTracker.java | 4 +- .../health/node/tracker/HealthTracker.java | 75 ++++++------------- .../tracker/RepositoriesHealthTracker.java | 4 +- .../health/node/LocalHealthMonitorTests.java | 27 +++---- .../node/tracker/DiskHealthTrackerTests.java | 20 ++--- .../RepositoriesHealthTrackerTests.java | 8 +- 7 files changed, 91 insertions(+), 119 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index 5ff147a11a06a..3fde94d559338 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -205,11 +205,6 @@ public void clusterChanged(ClusterChangedEvent event) { && currentMasterNode != null; if (prerequisitesFulfilled == false || healthNodeChanged || masterNodeChanged) { stopMonitoring(); - // Reset the reference of each HealthTracker. - // By doing this after `stopMonitoring()`, we're sure the `Monitoring` instance has been cancelled and therefore won't - // touch the `lastReportedValue` of the health trackers after we've reset them (only the new `Monitoring` instance will - // be able to update them). - healthTrackers.forEach(HealthTracker::reset); } if (prerequisitesFulfilled) { startMonitoringIfNecessary(); @@ -256,6 +251,7 @@ static class Monitoring implements Runnable, Scheduler.Cancellable { private final AtomicBoolean inFlightRequest; private volatile boolean cancelled = false; + private volatile boolean fistRun = true; private volatile Scheduler.ScheduledCancellable scheduledRun; private Monitoring( @@ -323,54 +319,56 @@ public void run() { return; } // Before we do anything, we're first going to make sure there is no in-flight request at this moment. 
- // If that's the case, we'll acquire the "lock", which prevents any other thread/instance from sending any requests. + // If that's the case, we'll acquire the "lock", which prevents any other threads/instances from sending any requests + // and writing to the health trackers' states. if (inFlightRequest.compareAndSet(false, true) == false) { logger.debug("Not allowed to send health info update request due to in-flight request, will try again."); + // Since we weren't able to acquire the lock, we don't need to release it, and we can schedule the next run right away. scheduleNextRunIfNecessary(); return; } - boolean nextRunScheduled = false; - Runnable releaseAndScheduleNextRun = new RunOnce(() -> { - inFlightRequest.set(false); - scheduleNextRunIfNecessary(); - }); try { - List> healthProgresses = getHealthProgresses(); - if (healthProgresses.isEmpty()) { - // Next run will still be scheduled in the `finally` block. + // On the first run, we're resetting all the health trackers. When we're in a first run, we either got restarted + // (health/master node change, manual restart, etc.) or we're actually starting the whole LocalHealthMonitor for + // the first time. In either case, we want to make sure we're (re)sending all the info to the health node, hence the reset. + // We're doing it _here_, so that we've acquired the inFlightRequest lock before we make any changes to the health trackers. + if (fistRun) { + healthTrackers.forEach(HealthTracker::reset); + fistRun = false; + } + List> changedHealthTrackers = getChangedHealthTrackers(); + if (changedHealthTrackers.isEmpty()) { + releaseAndScheduleNextRun(); return; } + // Create builder and add the current value of each (changed) health tracker to the request. 
var builder = new UpdateHealthInfoCacheAction.Request.Builder().nodeId(clusterService.localNode().getId()); - healthProgresses.forEach(changedHealthInfo -> changedHealthInfo.updateRequestBuilder(builder)); - - var listener = ActionListener.wrap(response -> { - // Only record health progress if this monitoring instance hasn't been cancelled in the meantime. - // This avoids any unwanted writes to the HealthTrackers' states after a new monitoring instance has possibly - // already started. - if (cancelled == false) { - healthProgresses.forEach(HealthTracker.HealthProgress::recordProgressIfRelevant); - } - }, e -> { + changedHealthTrackers.forEach(changedHealthTracker -> changedHealthTracker.addToRequestBuilder(builder)); + + // We don't need to do anything with the response when the request was successful, as HealthTracker#checkHealthChanged has + // already updated it's internal state. + var listener = ActionListener.wrap(response -> {}, e -> { if (e.getCause() instanceof NodeNotConnectedException || e.getCause() instanceof HealthNodeNotDiscoveredException) { logger.debug("Failed to connect to the health node [{}], will try again.", e.getCause().getMessage()); } else { logger.debug(() -> format("Failed to send health info to health node, will try again."), e); } + // If anything went wrong, we're going to reset the changed trackers to make + // sure their health will get reported in the next iteration. + changedHealthTrackers.forEach(HealthTracker::reset); }); client.execute( UpdateHealthInfoCacheAction.INSTANCE, builder.build(), - ActionListener.runAfter(listener, releaseAndScheduleNextRun) + ActionListener.runAfter(listener, new RunOnce(this::releaseAndScheduleNextRun)) ); - nextRunScheduled = true; } catch (Exception e) { logger.warn(() -> format("Failed to run scheduled health monitoring on thread pool [%s]", executor), e); - } finally { - // If the next run isn't scheduled because for example the health info hasn't changed, we schedule it here. 
- if (nextRunScheduled == false) { - releaseAndScheduleNextRun.run(); - } + // If anything went wrong, we're going to reset all the trackers to make + // sure their health will get reported in the next iteration. + healthTrackers.forEach(HealthTracker::reset); + releaseAndScheduleNextRun(); } } @@ -379,17 +377,19 @@ public void run() { * * @return a list of changed health info's. */ - private List> getHealthProgresses() { + private List> getChangedHealthTrackers() { var healthMetadata = HealthMetadata.getFromClusterState(clusterService.state()); // Don't try to run the health trackers if the HealthMetadata is not available. if (healthMetadata == null) { return List.of(); } - return healthTrackers.stream().>map(HealthTracker::trackHealth) - // Only return changed values. - .filter(HealthTracker.HealthProgress::hasChanged) - .toList(); + return healthTrackers.stream().filter(HealthTracker::checkHealthChanged).toList(); + } + + private void releaseAndScheduleNextRun() { + inFlightRequest.set(false); + scheduleNextRunIfNecessary(); } private void scheduleNextRunIfNecessary() { diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java index a478130d83a78..f8aae2443e9fa 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java @@ -48,7 +48,7 @@ public DiskHealthTracker(NodeService nodeService, ClusterService clusterService) * @return the current disk health info. 
*/ @Override - public DiskHealthInfo checkCurrentHealth() { + protected DiskHealthInfo determineCurrentHealth() { var clusterState = clusterService.state(); var healthMetadata = HealthMetadata.getFromClusterState(clusterState); DiscoveryNode node = clusterState.getNodes().getLocalNode(); @@ -92,7 +92,7 @@ public DiskHealthInfo checkCurrentHealth() { } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { builder.diskHealthInfo(healthInfo); } diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java index 2dd71a38f959e..8aec843e2bf1e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java @@ -8,31 +8,28 @@ package org.elasticsearch.health.node.tracker; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.health.node.LocalHealthMonitor; import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; -import java.util.Objects; -import java.util.concurrent.atomic.AtomicReference; - /** * Base class for health trackers that will be executed by the {@link LocalHealthMonitor}. It keeps track of the last - * reported value and can retrieve the current health status when requested. + * reported health and can retrieve the current health status when requested. 
* * @param the type of the health check result they track */ public abstract class HealthTracker { - private static final Logger logger = LogManager.getLogger(HealthTracker.class); - private final AtomicReference lastReportedValue = new AtomicReference<>(); + /** + * We can "simply" use a volatile field here, as we've ensured only one monitoring instance/thread at a time can update this value. + */ + private volatile T lastDeterminedHealth; /** * Determine the health info for this health check. * * @return the health info. */ - public abstract T checkCurrentHealth(); + protected abstract T determineCurrentHealth(); /** * Add the health info to the request builder. @@ -40,28 +37,29 @@ public abstract class HealthTracker { * @param builder the builder to add the health info to. * @param healthInfo the health info to add. */ - public abstract void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, T healthInfo); + protected abstract void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, T healthInfo); /** - * Create a new {@link HealthProgress} instance by getting the current last reported value and determining the health info at this time. - * - * @return the new {@link HealthProgress} instance. + * Add the last reported health to the request builder. */ - public HealthProgress trackHealth() { - return new HealthProgress<>(this, lastReportedValue.get(), checkCurrentHealth()); + public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder) { + addToRequestBuilder(builder, lastDeterminedHealth); } /** - * Update the last reported health info to current, but only when the value inside lastReportedValue - * is equal to previous. + * Determine the current health info for this tracker and check if it has changed from the last reported value. When the health has + * changed, we'll store the new health as the last reported value. 
* - * @param previous the previous value that should be in lastReportedValue at the time of execution. - * @param current the value that should be stored in lastReportedValue. + * @return whether the health has changed. */ - public void updateLastReportedHealth(T previous, T current) { - if (lastReportedValue.compareAndSet(previous, current)) { - logger.debug("Health info [{}] successfully sent, last reported value: {}.", current, previous); + public boolean checkHealthChanged() { + var health = determineCurrentHealth(); + assert health != null : "health trackers must return unknown health instead of null"; + if (health.equals(lastDeterminedHealth)) { + return false; } + lastDeterminedHealth = health; + return true; } /** @@ -69,37 +67,10 @@ public void updateLastReportedHealth(T previous, T current) { * Should be used when, for example, the master or health node has changed. */ public void reset() { - lastReportedValue.set(null); + lastDeterminedHealth = null; } - public T getLastReportedValue() { - return lastReportedValue.get(); - } - - /** - * A record for storing the previous and current value of a health check. This allows us to be sure no concurrent processes have - * updated the health check's reference value. - * - * @param the type that the health tracker returns - */ - public record HealthProgress(HealthTracker healthTracker, T previousHealth, T currentHealth) { - public boolean hasChanged() { - return Objects.equals(previousHealth, currentHealth) == false; - } - - /** - * See {@link HealthTracker#addToRequestBuilder}. - */ - public void updateRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder) { - healthTracker.addToRequestBuilder(builder, currentHealth); - } - - /** - * Update the reference value of the health tracker with the current health info. - * See {@link HealthTracker#updateLastReportedHealth} for more info. 
- */ - public void recordProgressIfRelevant() { - healthTracker.updateLastReportedHealth(previousHealth, currentHealth); - } + public T getLastDeterminedHealth() { + return lastDeterminedHealth; } } diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java index cffc470045e0b..016df116300b8 100644 --- a/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java @@ -33,7 +33,7 @@ public RepositoriesHealthTracker(RepositoriesService repositoriesService) { * @return the current repositories health on this node. */ @Override - public RepositoriesHealthInfo checkCurrentHealth() { + protected RepositoriesHealthInfo determineCurrentHealth() { var repositories = repositoriesService.getRepositories(); if (repositories.isEmpty()) { return new RepositoriesHealthInfo(List.of(), List.of()); @@ -52,7 +52,7 @@ public RepositoriesHealthInfo checkCurrentHealth() { } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, RepositoriesHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, RepositoriesHealthInfo healthInfo) { builder.repositoriesHealthInfo(healthInfo); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java index a4436fd637c5a..d4c21ed9839fc 100644 --- a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java @@ -152,9 +152,9 @@ public void testUpdateHealthInfo() throws Exception { // We override the poll interval like this to avoid the min value set by the setting which is too 
high for this test localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); } @SuppressWarnings("unchecked") @@ -169,7 +169,7 @@ public void testDoNotUpdateHealthInfoOnFailure() throws Exception { localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); assertBusy(() -> assertThat(clientCalled.get(), equalTo(true))); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); } @SuppressWarnings("unchecked") @@ -189,9 +189,10 @@ public void testSendHealthInfoToNewNode() throws Exception { return null; }).when(client).execute(any(), any(), any()); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); when(clusterService.state()).thenReturn(previous); localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -213,9 +214,10 @@ public void testResendHealthInfoOnMasterChange() throws Exception { return null; }).when(client).execute(any(), any(), any()); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); when(clusterService.state()).thenReturn(previous); 
localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -233,25 +235,24 @@ public void testEnablingAndDisabling() throws Exception { // Ensure that there are no issues if the cluster state hasn't been initialized yet localHealthMonitor.setEnabled(true); - assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); + assertThat(mockHealthTracker.getLastDeterminedHealth(), nullValue()); assertThat(clientCalledCount.get(), equalTo(0)); when(clusterService.state()).thenReturn(clusterState); localHealthMonitor.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); - assertThat(clientCalledCount.get(), equalTo(1)); + assertBusy(() -> assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(GREEN))); + assertBusy(() -> assertThat(clientCalledCount.get(), equalTo(1))); DiskHealthInfo nextHealthStatus = new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); // Disable the local monitoring localHealthMonitor.setEnabled(false); - localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(1)); + localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); mockHealthTracker.setHealthInfo(nextHealthStatus); assertThat(clientCalledCount.get(), equalTo(1)); - localHealthMonitor.setMonitorInterval(TimeValue.timeValueSeconds(30)); localHealthMonitor.setEnabled(true); - assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(nextHealthStatus))); + assertBusy(() -> 
assertThat(mockHealthTracker.getLastDeterminedHealth(), equalTo(nextHealthStatus))); } /** @@ -386,12 +387,12 @@ private static class MockHealthTracker extends HealthTracker { private volatile DiskHealthInfo healthInfo = GREEN; @Override - public DiskHealthInfo checkCurrentHealth() { + protected DiskHealthInfo determineCurrentHealth() { return healthInfo; } @Override - public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + protected void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { builder.diskHealthInfo(healthInfo); } diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java index dd2ef861e85c3..cb503312124d9 100644 --- a/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java @@ -116,25 +116,25 @@ public void testNoDiskData() { eq(false) ) ).thenReturn(nodeStats()); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS), diskHealth); } public void testGreenDiskStatus() { simulateHealthDiskSpace(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } public void testYellowDiskStatus() { initializeIncreasedDiskSpaceUsage(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } public void 
testRedDiskStatus() { simulateDiskOutOfSpace(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -144,7 +144,7 @@ public void testFrozenGreenDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } @@ -154,7 +154,7 @@ public void testFrozenRedDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -165,7 +165,7 @@ public void testSearchNodeGreenDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(GREEN, diskHealth); } @@ -176,7 +176,7 @@ public void testSearchNodeRedDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo 
diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); } @@ -187,7 +187,7 @@ public void testSearchAndIndexNodesYellowDiskStatus() { b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchAndIndexNode).localNodeId(searchAndIndexNode.getId()).build()) ); when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } @@ -204,7 +204,7 @@ public void testYellowStatusForNonDataNode() { ).copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); initializeIncreasedDiskSpaceUsage(); - DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + DiskHealthInfo diskHealth = diskHealthTracker.determineCurrentHealth(); assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); } diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java index 0b5f09acc69ca..8b7a2648b0529 100644 --- a/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java @@ -45,7 +45,7 @@ public void setUp() throws Exception { public void testGetHealthNoRepos() { when(repositoriesService.getRepositories()).thenReturn(Map.of()); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertTrue(health.invalidRepositories().isEmpty()); @@ 
-59,7 +59,7 @@ public void testGetHealthCorrectRepo() { when(repo.getMetadata()).thenReturn(metadata); when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), repo)); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertTrue(health.invalidRepositories().isEmpty()); @@ -69,7 +69,7 @@ public void testGetHealthUnknownType() { var repo = createRepositoryMetadata(); when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), new UnknownTypeRepository(repo))); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertEquals(1, health.unknownRepositories().size()); assertEquals(repo.name(), health.unknownRepositories().get(0)); @@ -82,7 +82,7 @@ public void testGetHealthInvalid() { Map.of(repo.name(), new InvalidRepository(repo, new RepositoryException(repo.name(), "Test"))) ); - var health = repositoriesHealthTracker.checkCurrentHealth(); + var health = repositoriesHealthTracker.determineCurrentHealth(); assertTrue(health.unknownRepositories().isEmpty()); assertEquals(1, health.invalidRepositories().size()); From 97b89770b7faa1a78bdce4b32ee37455f4cc4fd0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Tue, 19 Mar 2024 18:33:27 +0100 Subject: [PATCH 028/214] [Profiling] Accept OTEL host architecture values (#106494) --- .../xpack/profiling/CO2Calculator.java | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 454cd35b396b9..d681517a7ce6f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -53,14 +53,13 @@ public double getAnnualCO2Tons(String hostID, long samples) { } private double getKiloWattsPerCore(HostMetadata host) { - if ("aarch64".equals(host.profilingHostMachine)) { - // Assume that AARCH64 (aka ARM64) machines are more energy efficient than x86_64 machines. - return customKilowattsPerCoreARM64; - } - if ("x86_64".equals(host.profilingHostMachine)) { - return customKilowattsPerCoreX86; - } - return DEFAULT_KILOWATTS_PER_CORE; + return switch (host.profilingHostMachine) { + // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions, + // which require "arm64" and "amd64" to be reported as the host architecture. + case "arm64", "aarch64" -> customKilowattsPerCoreARM64; + case "amd64", "x86_64" -> customKilowattsPerCoreX86; + default -> DEFAULT_KILOWATTS_PER_CORE; + }; } private double getCO2TonsPerKWH(HostMetadata host) { From 0f504c13985580253efcb64acee55f6217ea12b5 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 19 Mar 2024 11:03:23 -0700 Subject: [PATCH 029/214] AwaitsFix #106501 --- .../src/test/java/org/elasticsearch/xpack/esql/CsvTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 56b6dcdd1ad8b..0948387c011a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -142,6 +143,7 @@ * To log the results logResults() should return "true". 
*/ // @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106501") public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); From 1802aea7239fd919cc1fd0c90c430ffad5b4f12f Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 19 Mar 2024 19:14:08 +0100 Subject: [PATCH 030/214] ESQL: Enrich match text (#106435) * WIP Support ENRICH MATCH on TEXT * Disallow KEYWORD from range enrich The ingest processor does not support this, and there is no keyword_range type to complement the numerical, date and ip range types. * Revert: Disallow KEYWORD from range enrich We allow using KEYWORD to range match against ip_range. * Update docs/changelog/106435.yaml * Improve changelog entry * Added yaml test for ENRICH on TEXT fields * Allow TEXT for range, so text matches IP-range (plus test) --- docs/changelog/106435.yaml | 6 ++ .../xpack/esql/CsvTestsDataLoader.java | 4 +- .../resources/enrich-IT_tests_only.csv-spec | 13 +++ .../enrich-policy-city_airports.json | 7 ++ .../xpack/esql/analysis/Analyzer.java | 3 +- .../rest-api-spec/test/esql/60_enrich.yml | 87 +++++++++++++++---- .../rest-api-spec/test/esql/61_enrich_ip.yml | 55 ++++++++++++ 7 files changed, 155 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/106435.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json diff --git a/docs/changelog/106435.yaml b/docs/changelog/106435.yaml new file mode 100644 index 0000000000000..5bfe0087a93d3 --- /dev/null +++ b/docs/changelog/106435.yaml @@ -0,0 +1,6 @@ +pr: 106435 +summary: "ENRICH support for TEXT fields" +area: ES|QL +type: enhancement +issues: + - 105384 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 3cddf3c10a7fe..ec181c552bf22 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -107,6 +107,7 @@ public class CsvTestsDataLoader { private static final EnrichConfig DECADES_ENRICH = new EnrichConfig("decades_policy", "enrich-policy-decades.json"); private static final EnrichConfig CITY_NAMES_ENRICH = new EnrichConfig("city_names", "enrich-policy-city_names.json"); private static final EnrichConfig CITY_BOUNDARIES_ENRICH = new EnrichConfig("city_boundaries", "enrich-policy-city_boundaries.json"); + private static final EnrichConfig CITY_AIRPORTS_ENRICH = new EnrichConfig("city_airports", "enrich-policy-city_airports.json"); public static final List ENRICH_SOURCE_INDICES = List.of( "languages", @@ -125,7 +126,8 @@ public class CsvTestsDataLoader { HEIGHTS_ENRICH, DECADES_ENRICH, CITY_NAMES_ENRICH, - CITY_BOUNDARIES_ENRICH + CITY_BOUNDARIES_ENRICH, + CITY_AIRPORTS_ENRICH ); /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec index cec1157455b18..ee43efa69447b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec @@ -334,3 +334,16 @@ count:long | centroid:geo_point | airport_in_city:boolean 396 | POINT (-2.534797 20.667712) | true 455 | POINT (3.090752 27.676442) | false ; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = 
LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json new file mode 100644 index 0000000000000..a02f98509be22 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-city_airports.json @@ -0,0 +1,7 @@ +{ + "match": { + "indices": "airport_city_boundaries", + "match_field": "airport", + "enrich_fields": ["city", "region", "city_boundary"] + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 3540aa83638a1..ce5a43c617b78 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -94,6 +94,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.NESTED; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; public class Analyzer extends ParameterizedRuleExecutor { // marker list of attributes for plans that do not have any concrete fields to return, but have other computed columns to return @@ -605,7 +606,7 @@ private LogicalPlan resolveEnrich(Enrich enrich, List childrenOutput) } private static final DataType[] GEO_TYPES = new DataType[] { GEO_POINT, GEO_SHAPE }; - private static final 
DataType[] NON_GEO_TYPES = new DataType[] { KEYWORD, IP, LONG, INTEGER, FLOAT, DOUBLE, DATETIME }; + private static final DataType[] NON_GEO_TYPES = new DataType[] { KEYWORD, TEXT, IP, LONG, INTEGER, FLOAT, DOUBLE, DATETIME }; private DataType[] allowedEnrichTypes(String matchType) { return matchType.equals(GEO_MATCH_TYPE) ? GEO_TYPES : NON_GEO_TYPES; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 2c3c919f2b37d..2e181af93f77c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -6,7 +6,7 @@ setup: features: allowed_warnings_regex - do: indices.create: - index: cities + index: cities body: settings: number_of_shards: 5 @@ -15,13 +15,13 @@ setup: city_code: type: keyword city: - type: keyword + type: text country: type: keyword - do: bulk: - index: "cities" + index: cities refresh: true body: - { "index": { } } @@ -31,16 +31,29 @@ setup: - do: enrich.put_policy: - name: cities_policy + name: city_codes_policy body: match: - indices: ["cities"] + indices: [ "cities" ] match_field: "city_code" - enrich_fields: ["city", "country"] + enrich_fields: [ "city", "country" ] + + - do: + enrich.put_policy: + name: city_names_policy + body: + match: + indices: [ "cities" ] + match_field: "city" + enrich_fields: [ "city_code", "country" ] + + - do: + enrich.execute_policy: + name: city_codes_policy - do: enrich.execute_policy: - name: cities_policy + name: city_names_policy - do: indices.create: @@ -52,39 +65,44 @@ setup: type: keyword city_id: type: keyword + city_name: + type: text - do: bulk: - index: "test" + index: test refresh: true body: - { "index": { } } - - { "name": "Alice", "city_id": "nyc" } + - { "name": "Alice", "city_id": "nyc", "city_name": "New York" } - { "index": { } } - - { "name": "Bob", "city_id": 
"nyc" } + - { "name": "Bob", "city_id": "nyc", "city_name": "New York" } - { "index": { } } - - { "name": "Mario", "city_id": "rom" } + - { "name": "Mario", "city_id": "rom", "city_name": "Rome" } - { "index": { } } - - { "name": "Denise", "city_id": "sgn" } + - { "name": "Denise", "city_id": "sgn", "city_name": "Tan Son Nhat" } --- teardown: - do: enrich.delete_policy: - name: cities_policy + name: city_codes_policy + - do: + enrich.delete_policy: + name: city_names_policy --- -"Basic": +"Enrich on keyword": - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | enrich cities_policy on city_id | keep name, city, country | sort name' + query: 'from test | enrich city_codes_policy on city_id | keep name, city, country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } - match: { columns.1.name: "city" } - - match: { columns.1.type: "keyword" } + - match: { columns.1.type: "text" } - match: { columns.2.name: "country" } - match: { columns.2.type: "keyword" } @@ -95,12 +113,14 @@ teardown: - match: { values.3: [ "Mario", "Rome", "Italy" ] } +--- +"Enrich on keyword with fields": - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | keep name, city_id | enrich cities_policy on city_id with country | sort name' + query: 'from test | keep name, city_id | enrich city_codes_policy on city_id with country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -116,12 +136,14 @@ teardown: - match: { values.3: [ "Mario", "rom", "Italy" ] } +--- +"Enrich on keyword with fields alias": - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'from test | keep name, city_id | enrich cities_policy on city_id with country_name = country | sort name' + query: 'from test | keep name, city_id | enrich city_codes_policy 
on city_id with country_name = country | sort name' - match: { columns.0.name: "name" } - match: { columns.0.type: "keyword" } @@ -135,3 +157,32 @@ teardown: - match: { values.1: [ "Bob", "nyc", "USA" ] } - match: { values.2: [ "Denise", "sgn", null ] } - match: { values.3: [ "Mario", "rom", "Italy" ] } + + +--- +"Enrich on text": + - skip: + version: " - 8.13.99" + reason: "TEXT field ENRICH support was added in 8.14.0" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | keep name, city_name | enrich city_names_policy on city_name | sort name' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: { columns.1.name: "city_name" } + - match: { columns.1.type: "text" } + - match: { columns.2.name: "city_code" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "country" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "Alice", "New York", "nyc", "USA" ] } + - match: { values.1: [ "Bob", "New York", "nyc", "USA" ] } + - match: { values.2: [ "Denise", "Tan Son Nhat", null, null ] } + - match: { values.3: [ "Mario", "Rome", "rom", "Italy" ] } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 76dff626b9481..77a6811b899f6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -74,6 +74,33 @@ setup: - { "index": { } } - { "@timestamp": "2023-06-24", "ip": "13.101.0.114", "message": "authentication failed" } + - do: + indices.create: + index: events_text + body: + mappings: + properties: + "@timestamp": + type: date + ip_text: + type: text + message: + type: keyword + + - do: + bulk: + index: events_text + refresh: true + body: 
+ - { "index": { } } + - { "@timestamp": "2023-06-20", "ip_text": "10.100.0.21", "message": "network connected" } + - { "index": { } } + - { "@timestamp": "2023-06-21", "ip_text": [ "10.100.0.21", "10.101.0.107" ], "message": "sending messages" } + - { "index": { } } + - { "@timestamp": "2023-06-22", "ip_text": "10.101.0.107", "message": "network disconnected" } + - { "index": { } } + - { "@timestamp": "2023-06-24", "ip_text": "13.101.0.114", "message": "authentication failed" } + --- teardown: - do: @@ -104,6 +131,34 @@ teardown: - match: { values.2: [ "10.101.0.107" , "QA", "Engineering", "network disconnected" ] } - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } +--- +"IP text fields": + - skip: + version: " - 8.13.99" + reason: "ENRICH support for TEXT fields was added in 8.14.0" + + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM events_text | ENRICH networks-policy ON ip_text | sort @timestamp | KEEP ip_text, name, department, message' + + - match: { columns.0.name: "ip_text" } + - match: { columns.0.type: "text" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "department" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "message" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "10.100.0.21", "Production", "OPS", "network connected" ] } + - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], [ "Production", "QA" ], [ "OPS","Engineering" ], "sending messages" ] } + - match: { values.2: [ "10.101.0.107" , "QA", "Engineering", "network disconnected" ] } + - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } + --- "Invalid IP strings": - skip: From acb16ff36bc0790fe3913041ce70a5518c7f3819 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 11:14:18 -0700 Subject: [PATCH 031/214] 
Improve packaging test debugging (#106491) Packaging tests have several files that may be useful in debugging failures. Additionally, we sometimes have assertions for which we want to catch them and emit additional debugging info. This commit guards the common ways that Elasticsearch is started and assertions are run with dumping all debug information available. --- .../packaging/test/PackagingTestCase.java | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index 303b37210535d..36fac5fe89ab5 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -289,15 +289,15 @@ protected void dumpDebug() { protected void assertWhileRunning(Platforms.PlatformAction assertions) throws Exception { try { awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false)); - } catch (Exception e) { + } catch (AssertionError | Exception e) { dumpDebug(); throw e; } try { assertions.run(); - } catch (Exception e) { - logger.warn("Elasticsearch log:\n" + FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz")); + } catch (AssertionError | Exception e) { + dumpDebug(); throw e; } stopElasticsearch(); @@ -392,15 +392,8 @@ public Shell.Result awaitElasticsearchStartupWithResult(Shell.Result result) thr public void startElasticsearch() throws Exception { try { awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false)); - } catch (Exception e) { - if (Files.exists(installation.home.resolve("elasticsearch.pid"))) { - String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim(); - logger.info("elasticsearch process ({}) failed to start", pid); - if (sh.run("jps").stdout().contains(pid)) { - 
logger.info("Dumping jstack of elasticsearch process ({}) ", pid); - sh.runIgnoreExitCode("jstack " + pid); - } - } + } catch (AssertionError | Exception e) { + dumpDebug(); throw e; } } From 8eda6b2716ab21c4728827b76abde95c11d2414e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 11:14:46 -0700 Subject: [PATCH 032/214] Wait for shard migration status update on shutdown (#106497) The shutdown integration tests test scenarios across multiple nodes. When checking if a shard is moved off a node that is shutting down, the shard migration status may not yet have been updated. This commit adds a busy wait to ensure the status has time to update before failing the test. closes #77488 --- .../xpack/shutdown/NodeShutdownIT.java | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java index c004eaf58939b..df5302dd62811 100644 --- a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java +++ b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -249,7 +248,6 @@ public void testShardsMoveOffRemovingNode() throws Exception { putNodeShutdown(nodeIdToShutdown, "REMOVE"); // assertBusy waiting for the shard to no longer be on that node - AtomicReference> debug = new AtomicReference<>(); assertBusy(() -> { List shardsResponse = entityAsList(client().performRequest(checkShardsRequest)); final long shardsOnNodeToShutDown = shardsResponse.stream() @@ -258,16 +256,17 
@@ public void testShardsMoveOffRemovingNode() throws Exception { .filter(shard -> "STARTED".equals(shard.get("state")) || "RELOCATING".equals(shard.get("state"))) .count(); assertThat(shardsOnNodeToShutDown, is(0L)); - debug.set(shardsResponse); }); - // Now check the shard migration status - Request getStatusRequest = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown"); - Response statusResponse = client().performRequest(getStatusRequest); - Map status = entityAsMap(statusResponse); - assertThat(ObjectPath.eval("nodes.0.shard_migration.status", status), equalTo("COMPLETE")); - assertThat(ObjectPath.eval("nodes.0.shard_migration.shard_migrations_remaining", status), equalTo(0)); - assertThat(ObjectPath.eval("nodes.0.shard_migration.explanation", status), nullValue()); + assertBusy(() -> { + // Now check the shard migration status + Request getStatusRequest = new Request("GET", "_nodes/" + nodeIdToShutdown + "/shutdown"); + Response statusResponse = client().performRequest(getStatusRequest); + Map status = entityAsMap(statusResponse); + assertThat(ObjectPath.eval("nodes.0.shard_migration.status", status), equalTo("COMPLETE")); + assertThat(ObjectPath.eval("nodes.0.shard_migration.shard_migrations_remaining", status), equalTo(0)); + assertThat(ObjectPath.eval("nodes.0.shard_migration.explanation", status), nullValue()); + }); } public void testShardsCanBeAllocatedAfterShutdownDeleted() throws Exception { From e36b3eb310790fd6b95a6a8c6246bbf5fa47ed07 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 11:15:02 -0700 Subject: [PATCH 033/214] Re-enable shutdown stalled test (#106498) NodeShutdownIT.testStalledShardMigrationProperlyDetected has been muted for a couple years. It apparently reproduced when the failure first started, but no longer reproduces on main. This commit re-enables the test and closes the test issue. We can open a new issue with any subsequent failure. 
closes #77456 --- .../java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java index df5302dd62811..b249e12946159 100644 --- a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java +++ b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java @@ -291,7 +291,6 @@ public void testShardsCanBeAllocatedAfterShutdownDeleted() throws Exception { ensureGreen(indexName); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/77456") public void testStalledShardMigrationProperlyDetected() throws Exception { String nodeIdToShutdown = getRandomNodeId(); int numberOfShards = randomIntBetween(1, 5); From bffd2a964c3e718339e65be98d6474217214a09f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Mar 2024 14:35:14 -0400 Subject: [PATCH 034/214] ESQL: Regex improvements (#106429) This makes a couple of changes to regex processing in the compute engine: 1. Process utf-8 strings directly. This should save a ton of time. 2. Snip the `toString` output if it is too big - I chose 64kb of strings. 3. I changed the formatting of the automaton to a slightly customized `dot` output. Because automata are graphs. Everyone knows it. And they are a lot easier to read as graphs. `dot` is easy to convert into a graph. 4. I implement `EvaluatorMapper` for regex operations which is pretty standard for the rest of our operations. 
--- .../compute/operator/EvalBenchmark.java | 34 +++- docs/changelog/106429.yaml | 5 + .../function/scalar/math/PowIntEvaluator.java | 146 ---------------- .../scalar/math/PowLongEvaluator.java | 146 ---------------- .../string/AutomataMatchEvaluator.java} | 36 ++-- .../xpack/esql/evaluator/EvalMapper.java | 2 - .../predicate/operator/regex/RegexMapper.java | 31 ---- .../predicate/operator/regex/RegexMatch.java | 23 --- .../function/scalar/string/AutomataMatch.java | 137 +++++++++++++++ .../function/scalar/string}/RLike.java | 15 +- .../function/scalar/string}/WildcardLike.java | 21 ++- .../xpack/esql/io/stream/PlanNamedTypes.java | 4 +- .../xpack/esql/parser/ExpressionBuilder.java | 4 +- .../function/AbstractFunctionTestCase.java | 43 +++-- .../expression/function/TestCaseSupplier.java | 162 +++++++++++++++--- .../scalar/multivalue/MvSortTests.java | 43 +---- .../function/scalar/string/RLikeTests.java | 157 +++++++++++++++++ .../scalar/string/WildcardLikeTests.java | 50 ++++++ .../optimizer/LogicalPlanOptimizerTests.java | 4 +- .../esql/parser/StatementParserTests.java | 4 +- 20 files changed, 614 insertions(+), 453 deletions(-) create mode 100644 docs/changelog/106429.yaml delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java delete mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java rename x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/{evaluator/predicate/operator/regex/RegexMatchEvaluator.java => expression/function/scalar/string/AutomataMatchEvaluator.java} (72%) delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java create mode 100644 
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{evaluator/predicate/operator/regex => expression/function/scalar/string}/RLike.java (72%) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{evaluator/predicate/operator/regex => expression/function/scalar/string}/WildcardLike.java (62%) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 1765897ba35e7..5b217efbe1ed1 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -8,6 +8,7 @@ package org.elasticsearch.benchmark.compute.operator; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; @@ -26,11 +27,13 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import 
org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; @@ -58,7 +61,6 @@ @State(Scope.Thread) @Fork(1) public class EvalBenchmark { - private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? private static final BlockFactory blockFactory = BlockFactory.getInstance( new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE @@ -82,7 +84,9 @@ public class EvalBenchmark { } } - @Param({ "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending" }) + @Param( + { "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending", "rlike" } + ) public String operation; private static Operator operator(String operation) { @@ -134,6 +138,11 @@ private static EvalOperator.ExpressionEvaluator evaluator(String operation) { FieldAttribute longField = longField(); yield EvalMapper.toEvaluator(new MvMin(Source.EMPTY, longField), layout(longField)).get(driverContext); } + case "rlike" -> { + FieldAttribute keywordField = keywordField(); + RLike rlike = new RLike(Source.EMPTY, keywordField, new RLikePattern(".ar")); + yield EvalMapper.toEvaluator(rlike, layout(keywordField)).get(driverContext); + } default -> throw new UnsupportedOperationException(); }; } @@ -146,6 +155,10 @@ private static FieldAttribute intField() { return new FieldAttribute(Source.EMPTY, "int", new EsField("int", DataTypes.INTEGER, Map.of(), true)); } + private static FieldAttribute keywordField() { + return new FieldAttribute(Source.EMPTY, "keyword", new EsField("keyword", DataTypes.KEYWORD, Map.of(), true)); + } + private static Layout layout(FieldAttribute... 
fields) { Layout.Builder layout = new Layout.Builder(); layout.append(Arrays.asList(fields)); @@ -205,6 +218,15 @@ private static void checkExpected(String operation, Page actual) { } } } + case "rlike" -> { + BooleanVector v = actual.getBlock(1).asVector(); + for (int i = 0; i < BLOCK_LENGTH; i++) { + boolean expected = i % 2 == 1; + if (v.getBoolean(i) != expected) { + throw new AssertionError("[" + operation + "] expected [" + expected + "] but was [" + v.getBoolean(i) + "]"); + } + } + } default -> throw new UnsupportedOperationException(); } } @@ -250,6 +272,14 @@ private static Page page(String operation) { } yield new Page(builder.build()); } + case "rlike" -> { + var builder = blockFactory.newBytesRefVectorBuilder(BLOCK_LENGTH); + BytesRef[] values = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + for (int i = 0; i < BLOCK_LENGTH; i++) { + builder.appendBytesRef(values[i % 2]); + } + yield new Page(builder.build().asBlock()); + } default -> throw new UnsupportedOperationException(); }; } diff --git a/docs/changelog/106429.yaml b/docs/changelog/106429.yaml new file mode 100644 index 0000000000000..7ac524d13909b --- /dev/null +++ b/docs/changelog/106429.yaml @@ -0,0 +1,5 @@ +pr: 106429 +summary: "ESQL: Regex improvements" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java deleted file mode 100644 index 1232e0dda7c0f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. -package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import java.lang.ArithmeticException; -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. - * This class is generated. Do not edit it. - */ -public final class PowIntEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator base; - - private final EvalOperator.ExpressionEvaluator exponent; - - private final DriverContext driverContext; - - public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.base = base; - this.exponent = exponent; - this.driverContext = driverContext; - } - - @Override - public Block.Ref eval(Page page) { - try (Block.Ref baseRef = base.eval(page)) { - DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); - try (Block.Ref exponentRef = exponent.eval(page)) { - DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - 
DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); - } - } - } - - public IntBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (baseBlock.getValueCount(p) != 1) { - if (baseBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (exponentBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (exponentBlock.getValueCount(p) != 1) { - if (exponentBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendInt(Pow.processInt(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public IntBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { - try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendInt(Pow.processInt(baseVector.getDouble(p), exponentVector.getDouble(p))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - 
return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - - @Override - public void close() { - Releasables.closeExpectNoException(base, exponent); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory base; - - private final EvalOperator.ExpressionEvaluator.Factory exponent; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, - EvalOperator.ExpressionEvaluator.Factory exponent) { - this.source = source; - this.base = base; - this.exponent = exponent; - } - - @Override - public PowIntEvaluator get(DriverContext context) { - return new PowIntEvaluator(source, base.get(context), exponent.get(context), context); - } - - @Override - public String toString() { - return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java deleted file mode 100644 index bd2e5f5e10ec2..0000000000000 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License -// 2.0; you may not use this file except in compliance with the Elastic License -// 2.0. 
-package org.elasticsearch.xpack.esql.expression.function.scalar.math; - -import java.lang.ArithmeticException; -import java.lang.IllegalArgumentException; -import java.lang.Override; -import java.lang.String; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; -import org.elasticsearch.core.Releasables; -import org.elasticsearch.xpack.esql.expression.function.Warnings; -import org.elasticsearch.xpack.ql.tree.Source; - -/** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. - * This class is generated. Do not edit it. - */ -public final class PowLongEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; - - private final EvalOperator.ExpressionEvaluator base; - - private final EvalOperator.ExpressionEvaluator exponent; - - private final DriverContext driverContext; - - public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, - EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { - this.warnings = new Warnings(source); - this.base = base; - this.exponent = exponent; - this.driverContext = driverContext; - } - - @Override - public Block.Ref eval(Page page) { - try (Block.Ref baseRef = base.eval(page)) { - DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); - try (Block.Ref exponentRef = exponent.eval(page)) { - DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); - DoubleVector baseVector = baseBlock.asVector(); - if (baseVector == null) { - return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - DoubleVector exponentVector = exponentBlock.asVector(); - if (exponentVector == null) { - return 
Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); - } - return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); - } - } - } - - public LongBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (baseBlock.getValueCount(p) != 1) { - if (baseBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - if (exponentBlock.isNull(p)) { - result.appendNull(); - continue position; - } - if (exponentBlock.getValueCount(p) != 1) { - if (exponentBlock.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); - } - result.appendNull(); - continue position; - } - try { - result.appendLong(Pow.processLong(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - public LongBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { - try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { - position: for (int p = 0; p < positionCount; p++) { - try { - result.appendLong(Pow.processLong(baseVector.getDouble(p), exponentVector.getDouble(p))); - } catch (ArithmeticException e) { - warnings.registerException(e); - result.appendNull(); - } - } - return result.build(); - } - } - - @Override - public String toString() { - return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - - 
@Override - public void close() { - Releasables.closeExpectNoException(base, exponent); - } - - static class Factory implements EvalOperator.ExpressionEvaluator.Factory { - private final Source source; - - private final EvalOperator.ExpressionEvaluator.Factory base; - - private final EvalOperator.ExpressionEvaluator.Factory exponent; - - public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, - EvalOperator.ExpressionEvaluator.Factory exponent) { - this.source = source; - this.base = base; - this.exponent = exponent; - } - - @Override - public PowLongEvaluator get(DriverContext context) { - return new PowLongEvaluator(source, base.get(context), exponent.get(context), context); - } - - @Override - public String toString() { - return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; - } - } -} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java similarity index 72% rename from x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java rename to x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java index bdb8bfd0f613a..fb95bbc1acef9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatchEvaluator.java @@ -2,13 +2,13 @@ // or more contributor license agreements. Licensed under the Elastic License // 2.0; you may not use this file except in compliance with the Elastic License // 2.0. 
-package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; @@ -22,22 +22,25 @@ import org.elasticsearch.xpack.ql.tree.Source; /** - * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RegexMatch}. + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link AutomataMatch}. * This class is generated. Do not edit it. */ -public final class RegexMatchEvaluator implements EvalOperator.ExpressionEvaluator { +public final class AutomataMatchEvaluator implements EvalOperator.ExpressionEvaluator { private final Warnings warnings; private final EvalOperator.ExpressionEvaluator input; - private final CharacterRunAutomaton pattern; + private final ByteRunAutomaton automaton; + + private final String pattern; private final DriverContext driverContext; - public RegexMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, - CharacterRunAutomaton pattern, DriverContext driverContext) { + public AutomataMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, + ByteRunAutomaton automaton, String pattern, DriverContext driverContext) { this.warnings = new Warnings(source); this.input = input; + this.automaton = automaton; this.pattern = pattern; this.driverContext = driverContext; } @@ -68,7 +71,7 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { result.appendNull(); continue position; } - result.appendBoolean(RegexMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), pattern)); + 
result.appendBoolean(AutomataMatch.process(inputBlock.getBytesRef(inputBlock.getFirstValueIndex(p), inputScratch), automaton, pattern)); } return result.build(); } @@ -78,7 +81,7 @@ public BooleanVector eval(int positionCount, BytesRefVector inputVector) { try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - result.appendBoolean(RegexMatch.process(inputVector.getBytesRef(p, inputScratch), pattern)); + result.appendBoolean(AutomataMatch.process(inputVector.getBytesRef(p, inputScratch), automaton, pattern)); } return result.build(); } @@ -86,7 +89,7 @@ public BooleanVector eval(int positionCount, BytesRefVector inputVector) { @Override public String toString() { - return "RegexMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; + return "AutomataMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; } @Override @@ -99,23 +102,26 @@ static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory input; - private final CharacterRunAutomaton pattern; + private final ByteRunAutomaton automaton; + + private final String pattern; public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory input, - CharacterRunAutomaton pattern) { + ByteRunAutomaton automaton, String pattern) { this.source = source; this.input = input; + this.automaton = automaton; this.pattern = pattern; } @Override - public RegexMatchEvaluator get(DriverContext context) { - return new RegexMatchEvaluator(source, input.get(context), pattern, context); + public AutomataMatchEvaluator get(DriverContext context) { + return new AutomataMatchEvaluator(source, input.get(context), automaton, pattern, context); } @Override public String toString() { - return "RegexMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; + return "AutomataMatchEvaluator[" 
+ "input=" + input + ", pattern=" + pattern + "]"; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 1087e9d33b805..c26f722d9f765 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.ComparisonMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEqualsMapper; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMapper; import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -48,7 +47,6 @@ public final class EvalMapper { ComparisonMapper.LESS_THAN, ComparisonMapper.LESS_THAN_OR_EQUAL, InMapper.IN_MAPPER, - RegexMapper.REGEX_MATCH, new InsensitiveEqualsMapper(), new BooleanLogic(), new Nots(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java deleted file mode 100644 index f37751e18858f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; - -import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; -import org.elasticsearch.xpack.esql.evaluator.EvalMapper; -import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; -import org.elasticsearch.xpack.esql.planner.Layout; -import org.elasticsearch.xpack.ql.expression.predicate.regex.AbstractStringPattern; -import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; - -public abstract class RegexMapper extends ExpressionMapper> { - - public static final ExpressionMapper REGEX_MATCH = new RegexMapper() { - @Override - public ExpressionEvaluator.Factory map(RegexMatch expression, Layout layout) { - return dvrCtx -> new RegexMatchEvaluator( - expression.source(), - EvalMapper.toEvaluator(expression.field(), layout).get(dvrCtx), - new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()), - dvrCtx - ); - } - }; -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java deleted file mode 100644 index ac1bf2031750f..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatch.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.ann.Fixed; - -public class RegexMatch { - @Evaluator - static boolean process(BytesRef input, @Fixed CharacterRunAutomaton pattern) { - if (input == null) { - return false; - } - return pattern.run(input.utf8ToString()); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java new file mode 100644 index 0000000000000..7dac02e50ddbc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.Transition; +import org.apache.lucene.util.automaton.UTF32ToUTF8; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * Matches {@link BytesRef}s against {@link Automaton automata}. 
+ */ +public class AutomataMatch { + /** + * Build an {@link EvalOperator.ExpressionEvaluator.Factory} that will match + * {@link BytesRef}s against {@link Automaton automata} and return a {@link BooleanBlock}. + */ + public static EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Source source, + EvalOperator.ExpressionEvaluator.Factory field, + Automaton utf32Automaton + ) { + /* + * ByteRunAutomaton has a way to convert utf32 to utf8, but if we used it + * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. + */ + Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); + } + + @Evaluator + static boolean process(BytesRef input, @Fixed(includeInToString = false) ByteRunAutomaton automaton, @Fixed String pattern) { + if (input == null) { + return false; + } + return automaton.run(input.bytes, input.offset, input.length); + } + + private static final int MAX_LENGTH = 1024 * 64; + + /** + * Convert an {@link Automaton} to dot. + *

    + * This was borrowed from {@link Automaton#toDot} but has been modified to snip if the length + * grows too much and to format the bytes differently. + *

    + */ + public static String toDot(Automaton automaton) { + StringBuilder b = new StringBuilder(); + b.append("digraph Automaton {\n"); + b.append(" rankdir = LR\n"); + b.append(" node [width=0.2, height=0.2, fontsize=8]\n"); + int numStates = automaton.getNumStates(); + if (numStates > 0) { + b.append(" initial [shape=plaintext,label=\"\"]\n"); + b.append(" initial -> 0\n"); + } + + Transition t = new Transition(); + + too_big: for (int state = 0; state < numStates; ++state) { + b.append(" "); + b.append(state); + if (automaton.isAccept(state)) { + b.append(" [shape=doublecircle,label=\"").append(state).append("\"]\n"); + } else { + b.append(" [shape=circle,label=\"").append(state).append("\"]\n"); + } + + int numTransitions = automaton.initTransition(state, t); + + for (int i = 0; i < numTransitions; ++i) { + automaton.getNextTransition(t); + + assert t.max >= t.min; + + b.append(" "); + b.append(state); + b.append(" -> "); + b.append(t.dest); + b.append(" [label=\""); + appendByte(t.min, b); + if (t.max != t.min) { + b.append('-'); + appendByte(t.max, b); + } + + b.append("\"]\n"); + if (b.length() >= MAX_LENGTH) { + b.append("...snip..."); + break too_big; + } + } + } + + b.append('}'); + return b.toString(); + } + + static void appendByte(int c, StringBuilder b) { + if (c > 255) { + throw new UnsupportedOperationException("can only format bytes but got [" + c + "]"); + } + if (c == 34) { + b.append("\\\""); + return; + } + if (c == 92) { + b.append("\\\\"); + return; + } + if (c >= 33 && c <= 126) { + b.appendCodePoint(c); + return; + } + b.append("0x"); + String hex = Integer.toHexString(c); + switch (hex.length()) { + case 1 -> b.append('0').append(hex); + case 2 -> b.append(hex); + default -> throw new UnsupportedOperationException("can only format bytes"); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java similarity index 72% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index 510c9b7098926..3fe4b92ca8f25 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -5,17 +5,21 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.function.Function; + import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -public class RLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.RLike { +public class RLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.RLike implements EvaluatorMapper { public RLike(Source source, Expression value, RLikePattern pattern) { super(source, value, pattern); } @@ -38,4 +42,11 @@ protected RLike replaceChild(Expression newChild) { protected TypeResolution resolveType() { return isString(field(), sourceText(), DEFAULT); } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return AutomataMatch.toEvaluator(source(), 
toEvaluator.apply(field()), pattern().createAutomaton()); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java similarity index 62% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index f7039bb95d1b2..00d5cc7b439fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -5,17 +5,22 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import org.apache.lucene.util.automaton.Automata; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.function.Function; + import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -public class WildcardLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardLike { +public class WildcardLike extends org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardLike implements EvaluatorMapper { public WildcardLike(Source source, Expression left, WildcardPattern pattern) { super(source, left, pattern, 
false); } @@ -34,4 +39,16 @@ protected WildcardLike replaceChild(Expression newLeft) { protected TypeResolution resolveType() { return isString(field(), sourceText(), DEFAULT); } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator( + Function toEvaluator + ) { + return AutomataMatch.toEvaluator( + source(), + toEvaluator.apply(field()), + // The empty pattern will accept the empty string + pattern().pattern().length() == 0 ? Automata.makeEmptyString() : pattern().createAutomaton() + ); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 9b3bc5a9cc045..8cf6e165653e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -26,8 +26,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; @@ -110,6 +108,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; @@ -119,6 +118,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index d58e25391aa26..8a13c80c0ea68 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -23,11 +23,11 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 40526d35031d4..1eaf9e7fb49ab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -181,11 +181,20 @@ public static Expression deepCopyOfField(String name, DataType type) { */ protected abstract Expression build(Source source, List args); - protected Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { + /** + * Build an {@link Expression} where all inputs are field references, + * except those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. + */ + protected final Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { return build(testCase.getSource(), testCase.getDataAsFields()); } - protected Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { + /** + * Build an {@link Expression} where all inputs are anonymous functions + * that make a copy of the values from a field except + * those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. 
+ */ + protected final Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { return build(testCase.getSource(), testCase.getDataAsDeepCopiedFields()); } @@ -255,7 +264,7 @@ private void testEvaluate(boolean readFloating) { } assertFalse("expected resolved", expression.typeResolved().unresolved()); expression = new FoldNull().rule(expression); - assertThat(expression.dataType(), equalTo(testCase.expectedType)); + assertThat(expression.dataType(), equalTo(testCase.expectedType())); logger.info("Result type: " + expression.dataType()); Object result; @@ -278,7 +287,7 @@ private void testEvaluate(boolean readFloating) { private Object toJavaObjectUnsignedLongAware(Block block, int position) { Object result; result = toJavaObject(block, position); - if (result != null && testCase.expectedType == DataTypes.UNSIGNED_LONG) { + if (result != null && testCase.expectedType() == DataTypes.UNSIGNED_LONG) { assertThat(result, instanceOf(Long.class)); result = NumericUtils.unsignedLongAsBigInteger((Long) result); } @@ -524,7 +533,7 @@ public final void testEvaluatorToString() { assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var factory = evaluator(buildFieldExpression(testCase)); try (ExpressionEvaluator ev = factory.get(driverContext())) { - assertThat(ev.toString(), equalTo(testCase.evaluatorToString)); + assertThat(ev.toString(), testCase.evaluatorToString()); } } @@ -532,7 +541,7 @@ public final void testFactoryToString() { assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null); assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); var factory = evaluator(buildFieldExpression(testCase)); - assertThat(factory.toString(), equalTo(testCase.evaluatorToString)); + assertThat(factory.toString(), testCase.evaluatorToString()); } public final void testFold() { @@ -544,12 +553,12 @@ public final 
void testFold() { } assertFalse(expression.typeResolved().unresolved()); Expression nullOptimized = new FoldNull().rule(expression); - assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType)); + assertThat(nullOptimized.dataType(), equalTo(testCase.expectedType())); assertTrue(nullOptimized.foldable()); if (testCase.foldingExceptionClass() == null) { Object result = nullOptimized.fold(); // Decode unsigned longs into BigIntegers - if (testCase.expectedType == DataTypes.UNSIGNED_LONG && result != null) { + if (testCase.expectedType() == DataTypes.UNSIGNED_LONG && result != null) { result = NumericUtils.unsignedLongAsBigInteger((Long) result); } assertThat(result, testCase.getMatcher()); @@ -670,11 +679,13 @@ protected static List anyNullIsNull(boolean entirelyNullPreser }).toList(); return new TestCaseSupplier.TestCase( data, - oc.evaluatorToString, - oc.expectedType, + oc.evaluatorToString(), + oc.expectedType(), nullValue(), null, - oc.getExpectedTypeError() + oc.getExpectedTypeError(), + null, + null ); })); @@ -691,11 +702,13 @@ protected static List anyNullIsNull(boolean entirelyNullPreser .toList(); return new TestCaseSupplier.TestCase( data, - "LiteralsEvaluator[lit=null]", - entirelyNullPreservesType == false && oc.getData().size() == 1 ? DataTypes.NULL : oc.expectedType, + equalTo("LiteralsEvaluator[lit=null]"), + entirelyNullPreservesType == false && oc.getData().size() == 1 ? 
DataTypes.NULL : oc.expectedType(), nullValue(), null, - oc.getExpectedTypeError() + oc.getExpectedTypeError(), + null, + null ); })); } @@ -1067,7 +1080,7 @@ public void trackSignature() { if (testCase.getData().stream().anyMatch(t -> t.type() == DataTypes.NULL)) { return; } - signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType); + signatures.putIfAbsent(testCase.getData().stream().map(TestCaseSupplier.TypedData::type).toList(), testCase.expectedType()); } @AfterClass diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 84f1b31fc8705..2bae1546cd02f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -54,7 +54,7 @@ public record TestCaseSupplier(String name, List types, Supplier { - private static Logger logger = LogManager.getLogger(TestCaseSupplier.class); + private static final Logger logger = LogManager.getLogger(TestCaseSupplier.class); /** * Build a test case without types. 
* @@ -1130,51 +1130,57 @@ public static class TestCase { /** * The {@link Source} this test case should be run with */ - private Source source; + private final Source source; /** * The parameter values and types to pass into the function for this test run */ - private List data; + private final List data; /** * The expected toString output for the evaluator this function invocation should generate */ - String evaluatorToString; + private final Matcher evaluatorToString; /** * The expected output type for the case being tested */ - DataType expectedType; + private final DataType expectedType; /** * A matcher to validate the output of the function run on the given input data */ - private Matcher matcher; + private final Matcher matcher; /** * Warnings this test is expected to produce */ - private String[] expectedWarnings; - - private Class foldingExceptionClass; - private String foldingExceptionMessage; + private final String[] expectedWarnings; private final String expectedTypeError; private final boolean allTypesAreRepresentable; + private final Class foldingExceptionClass; + private final String foldingExceptionMessage; + public TestCase(List data, String evaluatorToString, DataType expectedType, Matcher matcher) { - this(data, evaluatorToString, expectedType, matcher, null, null); + this(data, equalTo(evaluatorToString), expectedType, matcher); + } + + public TestCase(List data, Matcher evaluatorToString, DataType expectedType, Matcher matcher) { + this(data, evaluatorToString, expectedType, matcher, null, null, null, null); } public static TestCase typeError(List data, String expectedTypeError) { - return new TestCase(data, null, null, null, null, expectedTypeError); + return new TestCase(data, null, null, null, null, expectedTypeError, null, null); } TestCase( List data, - String evaluatorToString, + Matcher evaluatorToString, DataType expectedType, Matcher matcher, String[] expectedWarnings, - String expectedTypeError + String expectedTypeError, + Class 
foldingExceptionClass, + String foldingExceptionMessage ) { this.source = Source.EMPTY; this.data = data; @@ -1184,6 +1190,8 @@ public static TestCase typeError(List data, String expectedTypeError) this.expectedWarnings = expectedWarnings; this.expectedTypeError = expectedTypeError; this.allTypesAreRepresentable = data.stream().allMatch(d -> EsqlDataTypes.isRepresentable(d.type)); + this.foldingExceptionClass = foldingExceptionClass; + this.foldingExceptionMessage = foldingExceptionMessage; } public Source getSource() { @@ -1195,15 +1203,15 @@ public List getData() { } public List getDataAsFields() { - return data.stream().map(t -> AbstractFunctionTestCase.field(t.name(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asField).collect(Collectors.toList()); } public List getDataAsDeepCopiedFields() { - return data.stream().map(t -> AbstractFunctionTestCase.deepCopyOfField(t.name(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asDeepCopyOfField).collect(Collectors.toList()); } public List getDataAsLiterals() { - return data.stream().map(t -> new Literal(Source.synthetic(t.name()), t.data(), t.type())).collect(Collectors.toList()); + return data.stream().map(TypedData::asLiteral).collect(Collectors.toList()); } public List getDataValues() { @@ -1242,13 +1250,28 @@ public TestCase withWarning(String warning) { } else { newWarnings = new String[] { warning }; } - return new TestCase(data, evaluatorToString, expectedType, matcher, newWarnings, expectedTypeError); + return new TestCase( + data, + evaluatorToString, + expectedType, + matcher, + newWarnings, + expectedTypeError, + foldingExceptionClass, + foldingExceptionMessage + ); + } + + public TestCase withFoldingException(Class clazz, String message) { + return new TestCase(data, evaluatorToString, expectedType, matcher, expectedWarnings, expectedTypeError, clazz, message); + } + + public DataType expectedType() { + return expectedType; } - public 
TestCase withFoldingException(Class clazz, String message) { - foldingExceptionClass = clazz; - foldingExceptionMessage = message; - return this; + public Matcher evaluatorToString() { + return evaluatorToString; } } @@ -1265,18 +1288,55 @@ public TypedData get() { /** * Holds a data value and the intended parse type of that value - * @param data - value to test against - * @param type - type of the value, for building expressions - * @param name - a name for the value, used for generating test case names */ - public record TypedData(Object data, DataType type, String name) { - + public static class TypedData { public static final TypedData NULL = new TypedData(null, DataTypes.NULL, ""); + private final Object data; + private final DataType type; + private final String name; + private final boolean forceLiteral; + + /** + * @param data value to test against + * @param type type of the value, for building expressions + * @param name a name for the value, used for generating test case names + * @param forceLiteral should this data always be converted to a literal and never to a field reference? + */ + private TypedData(Object data, DataType type, String name, boolean forceLiteral) { + this.data = data; + this.type = type; + this.name = name; + this.forceLiteral = forceLiteral; + } + + /** + * @param data value to test against + * @param type type of the value, for building expressions + * @param name a name for the value, used for generating test case names + */ + public TypedData(Object data, DataType type, String name) { + this(data, type, name, false); + } + + /** + * Build a value, guessing the type via reflection. + * @param data value to test against + * @param name a name for the value, used for generating test case names + */ public TypedData(Object data, String name) { this(data, EsqlDataTypes.fromJava(data), name); } + /** + * Return a {@link TypedData} that always returns a {@link Literal} from + * {@link #asField} and {@link #asDeepCopyOfField}. 
Use this for things that + * must be constants. + */ + public TypedData forceLiteral() { + return new TypedData(data, type, name, true); + } + @Override public String toString() { if (type == DataTypes.UNSIGNED_LONG && data instanceof Long longData) { @@ -1284,5 +1344,53 @@ public String toString() { } return type.toString() + "(" + (data == null ? "null" : data.toString()) + ")"; } + + /** + * Convert this into reference to a field. + */ + public Expression asField() { + if (forceLiteral) { + return asLiteral(); + } + return AbstractFunctionTestCase.field(name, type); + } + + /** + * Convert this into an anonymous function that performs a copy of the values loaded from a field. + */ + public Expression asDeepCopyOfField() { + if (forceLiteral) { + return asLiteral(); + } + return AbstractFunctionTestCase.deepCopyOfField(name, type); + } + + /** + * Convert this into a {@link Literal}. + */ + public Literal asLiteral() { + return new Literal(Source.synthetic(name), data, type); + } + + /** + * Value to test against. + */ + public Object data() { + return data; + } + + /** + * Type of the value. For building {@link Expression}s. + */ + public DataType type() { + return type; + } + + /** + * A name for the value. Used to generate test names. 
+ */ + public String name() { + return name; + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java index 97b8a95289c7d..478e45167b859 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSortTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -47,30 +46,6 @@ protected Expression build(Source source, List args) { return new MvSort(source, args.get(0), args.size() > 1 ? args.get(1) : null); } - /** - * Override to create the second argument as a Literal instead of a FieldAttribute. - */ - @Override - protected Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { - List args = new ArrayList<>(2); - List data = testCase.getData(); - args.add(AbstractFunctionTestCase.field(data.get(0).name(), data.get(0).type())); - args.add(new Literal(Source.synthetic(data.get(1).name()), data.get(1).data(), data.get(1).type())); - return build(testCase.getSource(), args); - } - - /** - * Override to create the second argument as a Literal instead of a FieldAttribute. 
- */ - @Override - protected Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { - List args = new ArrayList<>(2); - List data = testCase.getData(); - args.add(AbstractFunctionTestCase.deepCopyOfField(data.get(0).name(), data.get(0).type())); - args.add(new Literal(Source.synthetic(data.get(1).name()), data.get(1).data(), data.get(1).type())); - return build(testCase.getSource(), args); - } - private static void booleans(List suppliers) { suppliers.add(new TestCaseSupplier(List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD), () -> { List field = randomList(1, 10, () -> randomBoolean()); @@ -78,7 +53,7 @@ private static void booleans(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.BOOLEAN, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BOOLEAN + "[field=Attribute[channel=0], order=true]", DataTypes.BOOLEAN, @@ -95,7 +70,7 @@ private static void ints(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.INTEGER, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.INT + "[field=Attribute[channel=0], order=false]", DataTypes.INTEGER, @@ -111,7 +86,7 @@ private static void longs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.LONG, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=true]", DataTypes.LONG, @@ -125,7 +100,7 @@ private static void longs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new 
TestCaseSupplier.TypedData(field, DataTypes.DATETIME, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.LONG + "[field=Attribute[channel=0], order=false]", DataTypes.DATETIME, @@ -141,7 +116,7 @@ private static void doubles(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.DOUBLE, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.DOUBLE + "[field=Attribute[channel=0], order=true]", DataTypes.DOUBLE, @@ -157,7 +132,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.KEYWORD, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=false]", DataTypes.KEYWORD, @@ -171,7 +146,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.TEXT, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=true]", DataTypes.TEXT, @@ -185,7 +160,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.IP, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], 
order=false]", DataTypes.IP, @@ -199,7 +174,7 @@ private static void bytesRefs(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(field, DataTypes.VERSION, "field"), - new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order") + new TestCaseSupplier.TypedData(order, DataTypes.KEYWORD, "order").forceLiteral() ), "MvSort" + ElementType.BYTES_REF + "[field=Attribute[channel=0], order=true]", DataTypes.VERSION, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java new file mode 100644 index 0000000000000..d367a7f77c981 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.RLikePattern; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; + +public class RLikeTests extends AbstractFunctionTestCase { + public RLikeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameters(() -> randomAlphaOfLength(1) + "?"); + } + + static Iterable parameters(Supplier optionalPattern) { + List cases = new ArrayList<>(); + cases.add( + new TestCaseSupplier( + "null", + List.of(DataTypes.NULL, DataTypes.KEYWORD, DataTypes.BOOLEAN), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(null, DataTypes.NULL, "e"), + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + "LiteralsEvaluator[lit=null]", + DataTypes.BOOLEAN, + nullValue() + ) 
+ ) + ); + casesForString(cases, "empty string", () -> "", false, optionalPattern); + casesForString(cases, "single ascii character", () -> randomAlphaOfLength(1), true, optionalPattern); + casesForString(cases, "ascii string", () -> randomAlphaOfLengthBetween(2, 100), true, optionalPattern); + casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, optionalPattern); + casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, optionalPattern); + casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, optionalPattern); + for (DataType type : EsqlDataTypes.types()) { + if (type == DataTypes.KEYWORD || type == DataTypes.TEXT || type == DataTypes.NULL) { + continue; + } + if (EsqlDataTypes.isRepresentable(type) == false) { + continue; + } + cases.add( + new TestCaseSupplier( + List.of(type, DataTypes.KEYWORD, DataTypes.BOOLEAN), + () -> TestCaseSupplier.TestCase.typeError( + List.of( + new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "e"), + new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataTypes.KEYWORD, "pattern") + .forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + "argument of [] must be [string], found value [e] type [" + type.typeName() + "]" + ) + ) + ); + } + return parameterSuppliersFromTypedData(cases); + } + + record TextAndPattern(String text, String pattern) {} + + private static void casesForString( + List cases, + String title, + Supplier textSupplier, + boolean canGenerateDifferent, + Supplier optionalPattern + ) { + cases(cases, title + " matches self", () -> { + String text = textSupplier.get(); + return new TextAndPattern(text, text); + }, true); + cases(cases, title + " doesn't match self with trailing", () -> { + String text = textSupplier.get(); + return new TextAndPattern(text, text + randomAlphaOfLength(1)); + }, false); + cases(cases, title + " matches self with optional trailing", 
() -> { + String text = randomAlphaOfLength(1); + return new TextAndPattern(text, text + optionalPattern.get()); + }, true); + if (canGenerateDifferent) { + cases(cases, title + " doesn't match different", () -> { + String text = textSupplier.get(); + String different = randomValueOtherThan(text, textSupplier); + return new TextAndPattern(text, different); + }, false); + } + } + + private static void cases(List cases, String title, Supplier textAndPattern, boolean expected) { + for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT }) { + cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, type, DataTypes.BOOLEAN), () -> { + TextAndPattern v = textAndPattern.get(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(v.text), type, "e"), + new TestCaseSupplier.TypedData(new BytesRef(v.pattern), type, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "caseInsensitive").forceLiteral() + ), + startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), + DataTypes.BOOLEAN, + equalTo(expected) + ); + })); + } + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assumeFalse("generated test cases containing nulls by hand", true); + } + + @Override + protected Expression build(Source source, List args) { + Expression expression = args.get(0); + Literal pattern = (Literal) args.get(1); + Literal caseInsensitive = (Literal) args.get(2); + return new RLike( + source, + expression, + new RLikePattern(((BytesRef) pattern.fold()).utf8ToString()), + (Boolean) caseInsensitive.fold() + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java new file mode 100644 index 
0000000000000..e70a57cfd5f0e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class WildcardLikeTests extends AbstractFunctionTestCase { + public WildcardLikeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return RLikeTests.parameters(() -> "*"); + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assumeFalse("generated test cases containing nulls by hand", true); + } + + @Override + protected Expression build(Source source, List args) { + Expression expression = args.get(0); + Literal pattern = (Literal) args.get(1); + Literal caseInsensitive = (Literal) args.get(2); + assertThat(caseInsensitive.fold(), equalTo(false)); + return new 
WildcardLike(source, expression, new WildcardPattern(((BytesRef) pattern.fold()).utf8ToString())); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index f2bce6951151e..df806e0afaffb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -24,8 +24,6 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Avg; @@ -59,7 +57,9 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index f8607a101a93c..9e215e45fbde2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -14,8 +14,8 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike; -import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; From bceb38dc7f4900797e516d23f8552c86534fb189 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 19 Mar 2024 19:45:04 +0100 Subject: [PATCH 035/214] Fix default search timeout in watcher docs (#106404) --- docs/reference/watcher/input/search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/watcher/input/search.asciidoc b/docs/reference/watcher/input/search.asciidoc index 9ea7a7d5da728..96ec1e4ad6e5f 100644 --- a/docs/reference/watcher/input/search.asciidoc +++ b/docs/reference/watcher/input/search.asciidoc @@ -194,7 +194,7 @@ accurately. 
When a search generates a large response, you can use `extract` to select the relevant fields instead of loading the entire response. -| `timeout` | no | 30s | The timeout for waiting for the search api call to return. If no response is +| `timeout` | no | 1m | The timeout for waiting for the search api call to return. If no response is returned within this time, the search input times out and fails. This setting overrides the default search operations timeouts. |====== From 2ab2a06f61d4be851f11f77cff1ea2adf399c465 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 19 Mar 2024 12:19:47 -0700 Subject: [PATCH 036/214] Update bundled JDK to Java 22 (#106482) --- .../AdoptiumJdkToolchainResolver.java | 40 +++++-------------- .../OracleOpenJdkToolchainResolver.java | 6 +-- .../AdoptiumJdkToolchainResolverSpec.groovy | 8 +--- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 26 ++++++------ 5 files changed, 26 insertions(+), 56 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index 0270ee22ca8c5..89a40711c9a19 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -11,7 +11,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.commons.compress.utils.Lists; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainDownload; import org.gradle.jvm.toolchain.JavaToolchainRequest; @@ -21,17 +20,17 @@ import java.io.IOException; import java.net.URI; import java.net.URL; -import java.util.Comparator; import java.util.Map; import 
java.util.Optional; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.StreamSupport; import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri; public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver { // package protected for better testing - final Map> CACHED_SEMVERS = new ConcurrentHashMap<>(); + final Map> CACHED_RELEASES = new ConcurrentHashMap<>(); @Override public Optional resolve(JavaToolchainRequest request) { @@ -39,7 +38,7 @@ public Optional resolve(JavaToolchainRequest request) { return Optional.empty(); } AdoptiumVersionRequest versionRequestKey = toVersionRequest(request); - Optional versionInfo = CACHED_SEMVERS.computeIfAbsent( + Optional versionInfo = CACHED_RELEASES.computeIfAbsent( versionRequestKey, (r) -> resolveAvailableVersion(versionRequestKey) ); @@ -54,12 +53,12 @@ private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) { return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion); } - private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { + private Optional resolveAvailableVersion(AdoptiumVersionRequest requestKey) { ObjectMapper mapper = new ObjectMapper(); try { int languageVersion = requestKey.languageVersion.asInt(); URL source = new URL( - "https://api.adoptium.net/v3/info/release_versions?architecture=" + "https://api.adoptium.net/v3/info/release_names?architecture=" + requestKey.arch + "&image_type=jdk&os=" + requestKey.platform @@ -71,14 +70,8 @@ private Optional resolveAvailableVersion(AdoptiumVersionReq + ")" ); JsonNode jsonNode = mapper.readTree(source); - JsonNode versionsNode = jsonNode.get("versions"); - return Optional.of( - Lists.newArrayList(versionsNode.iterator()) - .stream() - .map(this::toVersionInfo) - .max(Comparator.comparing(AdoptiumVersionInfo::semver)) - .get() - ); + JsonNode versionsNode = jsonNode.get("releases"); + return StreamSupport.stream(versionsNode.spliterator(), 
false).map(JsonNode::textValue).findFirst(); } catch (FileNotFoundException e) { // request combo not supported (e.g. aarch64 + windows return Optional.empty(); @@ -87,21 +80,10 @@ private Optional resolveAvailableVersion(AdoptiumVersionReq } } - private AdoptiumVersionInfo toVersionInfo(JsonNode node) { - return new AdoptiumVersionInfo( - node.get("build").asInt(), - node.get("major").asInt(), - node.get("minor").asInt(), - node.get("openjdk_version").asText(), - node.get("security").asInt(), - node.get("semver").asText() - ); - } - - private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { + private URI resolveDownloadURI(AdoptiumVersionRequest request, String version) { return URI.create( - "https://api.adoptium.net/v3/binary/version/jdk-" - + versionInfo.semver + "https://api.adoptium.net/v3/binary/version/" + + version + "/" + request.platform + "/" @@ -118,7 +100,5 @@ private boolean requestIsSupported(JavaToolchainRequest request) { return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM); } - record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {} - record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {} } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java index 818cb040c172e..162895fd486cf 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java @@ -39,11 +39,7 @@ record JdkBuild(JavaLanguageVersion languageVersion, String version, String buil ); // package private so it can be replaced by tests - 
List builds = List.of( - getBundledJdkBuild(), - // 22 release candidate - new JdkBuild(JavaLanguageVersion.of(22), "22", "36", "830ec9fcccef480bb3e73fb7ecafe059") - ); + List builds = List.of(getBundledJdkBuild()); private JdkBuild getBundledJdkBuild() { String bundledJdkVersion = VersionProperties.getBundledJdkVersion(); diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index 6383d577f027f..fe4a644ddfc1d 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -11,7 +11,6 @@ package org.elasticsearch.gradle.internal.toolchain import org.gradle.api.services.BuildServiceParameters import org.gradle.jvm.toolchain.JavaLanguageVersion import org.gradle.jvm.toolchain.JavaToolchainResolver -import org.gradle.platform.OperatingSystem import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString @@ -38,12 +37,7 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { toOsString(it[2], it[1]), toArchString(it[3]), languageVersion); - resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(), - 1, - 1, - "" + languageVersion.asInt() + ".1.1.1+37", - 0, "" + languageVersion.asInt() + ".1.1.1+37.1" - ))) + resolver.CACHED_RELEASES.put(request, Optional.of('jdk-' + languageVersion.asInt() + '.1.1.1+37.1')) } return resolver diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 
0883097e75aad..a2e8651810042 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,7 +2,7 @@ elasticsearch = 8.14.0 lucene = 9.10.0 bundled_jdk_vendor = openjdk -bundled_jdk = 21.0.2+13@f2283984656d49d69e91c558476027ac +bundled_jdk = 22+36@830ec9fcccef480bb3e73fb7ecafe059 # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 8978274e6df95..7e2e781d3ce62 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1694,25 +1694,25 @@ - - - + + + - - + + - - - + + + - - + + - - - + + + From d362517769da0c55df0a36b36733aabfc18a459c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 12:31:35 -0700 Subject: [PATCH 037/214] Mute EsqlClientYamlIT esql/60_enrich/Enrich on keyword see https://github.com/elastic/elasticsearch/issues/106507 --- x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 09397710bb856..283d449937739 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -39,6 +39,10 @@ BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseN def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) + systemProperty("tests.rest.blacklist", [ + // https://github.com/elastic/elasticsearch/issues/106507 + "esql/60_enrich/Enrich on keyword" + ].join(',')) testClassesDirs = sourceSets.yamlRestTest.output.classesDirs classpath = sourceSets.yamlRestTest.runtimeClasspath } From 1541da5e6517f8461a427241b207abb1c35b499e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Mar 2024 15:40:13 -0400 
Subject: [PATCH 038/214] ESQL: Generate more docs (#106367) This modifies the ESQL test infrastructure to generate more of the documentation for functions. It generates the *Description* section, the *Examples* section, and the *Parameters* section as separate files so we can use them as needed. It also generates a `layout` file that's just a guess as to how to render the whole thing. In some cases it'll work and we can use that instead of hand maintaining a "top level" description file for the function. Most newly generated files are unused. We have to choose to pick them up by replacing the sections we were manually maintaining with an include of the generated section. Or by replacing the entire hand maintained file with the generated top level file. Relates to #104247 --- docs/reference/esql/functions/abs.asciidoc | 4 +- docs/reference/esql/functions/acos.asciidoc | 2 - docs/reference/esql/functions/asin.asciidoc | 2 - docs/reference/esql/functions/atan.asciidoc | 4 +- docs/reference/esql/functions/atan2.asciidoc | 2 - docs/reference/esql/functions/binary.asciidoc | 18 ---- docs/reference/esql/functions/ceil.asciidoc | 4 +- .../esql/functions/coalesce.asciidoc | 24 +---- docs/reference/esql/functions/cos.asciidoc | 2 - docs/reference/esql/functions/cosh.asciidoc | 2 - .../esql/functions/date_diff.asciidoc | 2 - .../esql/functions/description/abs.asciidoc | 5 + .../esql/functions/description/acos.asciidoc | 5 + .../esql/functions/description/asin.asciidoc | 5 + .../esql/functions/description/atan.asciidoc | 5 + .../esql/functions/description/atan2.asciidoc | 5 + .../description/auto_bucket.asciidoc | 5 + .../esql/functions/description/case.asciidoc | 5 + .../esql/functions/description/ceil.asciidoc | 5 + .../functions/description/coalesce.asciidoc | 5 + .../functions/description/concat.asciidoc | 5 + .../esql/functions/description/cos.asciidoc | 5 + .../esql/functions/description/cosh.asciidoc | 5 + .../functions/description/date_diff.asciidoc | 5 +
.../description/date_extract.asciidoc | 5 + .../functions/description/date_parse.asciidoc | 5 + .../esql/functions/description/e.asciidoc | 5 + .../functions/description/ends_with.asciidoc | 5 + .../esql/functions/description/floor.asciidoc | 5 + .../functions/description/greatest.asciidoc | 5 + .../esql/functions/description/least.asciidoc | 5 + .../esql/functions/description/left.asciidoc | 5 + .../functions/description/length.asciidoc | 5 + .../esql/functions/description/log.asciidoc | 5 + .../esql/functions/description/log10.asciidoc | 5 + .../esql/functions/description/ltrim.asciidoc | 5 + .../functions/description/mv_avg.asciidoc | 5 + .../functions/description/mv_concat.asciidoc | 5 + .../functions/description/mv_count.asciidoc | 5 + .../functions/description/mv_dedupe.asciidoc | 5 + .../functions/description/mv_first.asciidoc | 5 + .../functions/description/mv_last.asciidoc | 5 + .../functions/description/mv_max.asciidoc | 5 + .../functions/description/mv_median.asciidoc | 5 + .../functions/description/mv_min.asciidoc | 5 + .../functions/description/mv_slice.asciidoc | 5 + .../functions/description/mv_sort.asciidoc | 5 + .../functions/description/mv_sum.asciidoc | 5 + .../functions/description/mv_zip.asciidoc | 5 + .../esql/functions/description/pi.asciidoc | 5 + .../esql/functions/description/pow.asciidoc | 5 + .../functions/description/replace.asciidoc | 5 + .../esql/functions/description/right.asciidoc | 5 + .../esql/functions/description/round.asciidoc | 5 + .../esql/functions/description/rtrim.asciidoc | 5 + .../esql/functions/description/sin.asciidoc | 5 + .../esql/functions/description/sinh.asciidoc | 5 + .../esql/functions/description/split.asciidoc | 5 + .../esql/functions/description/sqrt.asciidoc | 5 + .../esql/functions/description/st_x.asciidoc | 5 + .../esql/functions/description/st_y.asciidoc | 5 + .../description/starts_with.asciidoc | 5 + .../functions/description/substring.asciidoc | 5 + .../esql/functions/description/tan.asciidoc | 5 + 
.../esql/functions/description/tanh.asciidoc | 5 + .../esql/functions/description/tau.asciidoc | 5 + .../functions/description/to_boolean.asciidoc | 5 + .../description/to_cartesianpoint.asciidoc | 5 + .../description/to_cartesianshape.asciidoc | 5 + .../description/to_datetime.asciidoc | 5 + .../functions/description/to_degrees.asciidoc | 5 + .../functions/description/to_double.asciidoc | 5 + .../description/to_geopoint.asciidoc | 5 + .../description/to_geoshape.asciidoc | 5 + .../functions/description/to_integer.asciidoc | 5 + .../esql/functions/description/to_ip.asciidoc | 5 + .../functions/description/to_long.asciidoc | 5 + .../functions/description/to_lower.asciidoc | 5 + .../functions/description/to_radians.asciidoc | 5 + .../functions/description/to_string.asciidoc | 5 + .../description/to_unsigned_long.asciidoc | 5 + .../functions/description/to_upper.asciidoc | 5 + .../functions/description/to_version.asciidoc | 5 + .../esql/functions/description/trim.asciidoc | 5 + .../esql/functions/ends_with.asciidoc | 2 - .../esql/functions/examples/coalesce.asciidoc | 13 +++ .../esql/functions/examples/left.asciidoc | 13 +++ docs/reference/esql/functions/floor.asciidoc | 4 +- .../esql/functions/greatest.asciidoc | 2 - .../esql/functions/layout/abs.asciidoc | 14 +++ .../esql/functions/layout/acos.asciidoc | 14 +++ .../esql/functions/layout/asin.asciidoc | 14 +++ .../esql/functions/layout/atan.asciidoc | 14 +++ .../esql/functions/layout/atan2.asciidoc | 14 +++ .../functions/layout/auto_bucket.asciidoc | 14 +++ .../esql/functions/layout/case.asciidoc | 14 +++ .../esql/functions/layout/ceil.asciidoc | 14 +++ .../esql/functions/layout/coalesce.asciidoc | 15 +++ .../esql/functions/layout/concat.asciidoc | 14 +++ .../esql/functions/layout/cos.asciidoc | 14 +++ .../esql/functions/layout/cosh.asciidoc | 14 +++ .../esql/functions/layout/date_diff.asciidoc | 14 +++ .../functions/layout/date_extract.asciidoc | 14 +++ .../esql/functions/layout/date_parse.asciidoc | 14 +++ 
.../esql/functions/layout/e.asciidoc | 14 +++ .../esql/functions/layout/ends_with.asciidoc | 14 +++ .../esql/functions/layout/floor.asciidoc | 14 +++ .../esql/functions/layout/greatest.asciidoc | 14 +++ .../esql/functions/layout/least.asciidoc | 14 +++ .../esql/functions/layout/left.asciidoc | 15 +++ .../esql/functions/layout/length.asciidoc | 14 +++ .../esql/functions/layout/log.asciidoc | 14 +++ .../esql/functions/layout/log10.asciidoc | 14 +++ .../esql/functions/layout/ltrim.asciidoc | 14 +++ .../esql/functions/layout/mv_avg.asciidoc | 14 +++ .../esql/functions/layout/mv_concat.asciidoc | 14 +++ .../esql/functions/layout/mv_count.asciidoc | 14 +++ .../esql/functions/layout/mv_dedupe.asciidoc | 14 +++ .../esql/functions/layout/mv_first.asciidoc | 14 +++ .../esql/functions/layout/mv_last.asciidoc | 14 +++ .../esql/functions/layout/mv_max.asciidoc | 14 +++ .../esql/functions/layout/mv_median.asciidoc | 14 +++ .../esql/functions/layout/mv_min.asciidoc | 14 +++ .../esql/functions/layout/mv_slice.asciidoc | 14 +++ .../esql/functions/layout/mv_sort.asciidoc | 14 +++ .../esql/functions/layout/mv_sum.asciidoc | 14 +++ .../esql/functions/layout/mv_zip.asciidoc | 14 +++ .../esql/functions/layout/pi.asciidoc | 14 +++ .../esql/functions/layout/pow.asciidoc | 14 +++ .../esql/functions/layout/replace.asciidoc | 14 +++ .../esql/functions/layout/right.asciidoc | 14 +++ .../esql/functions/layout/round.asciidoc | 14 +++ .../esql/functions/layout/rtrim.asciidoc | 14 +++ .../esql/functions/layout/sin.asciidoc | 14 +++ .../esql/functions/layout/sinh.asciidoc | 14 +++ .../esql/functions/layout/split.asciidoc | 14 +++ .../esql/functions/layout/sqrt.asciidoc | 14 +++ .../esql/functions/layout/st_x.asciidoc | 14 +++ .../esql/functions/layout/st_y.asciidoc | 14 +++ .../functions/layout/starts_with.asciidoc | 14 +++ .../esql/functions/layout/substring.asciidoc | 14 +++ .../esql/functions/layout/tan.asciidoc | 14 +++ .../esql/functions/layout/tanh.asciidoc | 14 +++ 
.../esql/functions/layout/tau.asciidoc | 14 +++ .../esql/functions/layout/to_boolean.asciidoc | 14 +++ .../layout/to_cartesianpoint.asciidoc | 14 +++ .../layout/to_cartesianshape.asciidoc | 14 +++ .../functions/layout/to_datetime.asciidoc | 14 +++ .../esql/functions/layout/to_degrees.asciidoc | 14 +++ .../esql/functions/layout/to_double.asciidoc | 14 +++ .../functions/layout/to_geopoint.asciidoc | 14 +++ .../functions/layout/to_geoshape.asciidoc | 14 +++ .../esql/functions/layout/to_integer.asciidoc | 14 +++ .../esql/functions/layout/to_ip.asciidoc | 14 +++ .../esql/functions/layout/to_long.asciidoc | 14 +++ .../esql/functions/layout/to_lower.asciidoc | 14 +++ .../esql/functions/layout/to_radians.asciidoc | 14 +++ .../esql/functions/layout/to_string.asciidoc | 14 +++ .../layout/to_unsigned_long.asciidoc | 14 +++ .../esql/functions/layout/to_upper.asciidoc | 14 +++ .../esql/functions/layout/to_version.asciidoc | 14 +++ .../esql/functions/layout/trim.asciidoc | 14 +++ docs/reference/esql/functions/least.asciidoc | 2 - docs/reference/esql/functions/left.asciidoc | 36 ------- docs/reference/esql/functions/log.asciidoc | 2 - docs/reference/esql/functions/log10.asciidoc | 2 - docs/reference/esql/functions/ltrim.asciidoc | 4 +- docs/reference/esql/functions/mv_avg.asciidoc | 2 - .../esql/functions/mv_concat.asciidoc | 2 - .../esql/functions/mv_count.asciidoc | 2 - .../esql/functions/mv_dedupe.asciidoc | 2 - .../esql/functions/mv_first.asciidoc | 2 - .../reference/esql/functions/mv_last.asciidoc | 2 - docs/reference/esql/functions/mv_max.asciidoc | 2 - .../esql/functions/mv_median.asciidoc | 2 - docs/reference/esql/functions/mv_min.asciidoc | 2 - .../esql/functions/mv_slice.asciidoc | 2 - .../reference/esql/functions/mv_sort.asciidoc | 2 - docs/reference/esql/functions/mv_sum.asciidoc | 2 - docs/reference/esql/functions/mv_zip.asciidoc | 2 - .../esql/functions/parameters/abs.asciidoc | 4 + .../esql/functions/parameters/acos.asciidoc | 4 + 
.../esql/functions/parameters/asin.asciidoc | 4 + .../esql/functions/parameters/atan.asciidoc | 4 + .../esql/functions/parameters/atan2.asciidoc | 7 ++ .../functions/parameters/auto_bucket.asciidoc | 13 +++ .../esql/functions/parameters/case.asciidoc | 7 ++ .../esql/functions/parameters/ceil.asciidoc | 4 + .../functions/parameters/coalesce.asciidoc | 7 ++ .../esql/functions/parameters/concat.asciidoc | 7 ++ .../esql/functions/parameters/cos.asciidoc | 4 + .../esql/functions/parameters/cosh.asciidoc | 4 + .../functions/parameters/date_diff.asciidoc | 10 ++ .../parameters/date_extract.asciidoc | 7 ++ .../functions/parameters/date_parse.asciidoc | 7 ++ .../esql/functions/parameters/e.asciidoc | 1 + .../functions/parameters/ends_with.asciidoc | 7 ++ .../esql/functions/parameters/floor.asciidoc | 4 + .../functions/parameters/greatest.asciidoc | 7 ++ .../esql/functions/parameters/least.asciidoc | 7 ++ .../esql/functions/parameters/left.asciidoc | 7 ++ .../esql/functions/parameters/length.asciidoc | 4 + .../esql/functions/parameters/log.asciidoc | 7 ++ .../esql/functions/parameters/log10.asciidoc | 4 + .../esql/functions/parameters/ltrim.asciidoc | 4 + .../esql/functions/parameters/mv_avg.asciidoc | 4 + .../functions/parameters/mv_concat.asciidoc | 7 ++ .../functions/parameters/mv_count.asciidoc | 4 + .../functions/parameters/mv_dedupe.asciidoc | 4 + .../functions/parameters/mv_first.asciidoc | 4 + .../functions/parameters/mv_last.asciidoc | 4 + .../esql/functions/parameters/mv_max.asciidoc | 4 + .../functions/parameters/mv_median.asciidoc | 4 + .../esql/functions/parameters/mv_min.asciidoc | 4 + .../functions/parameters/mv_slice.asciidoc | 10 ++ .../functions/parameters/mv_sort.asciidoc | 7 ++ .../esql/functions/parameters/mv_sum.asciidoc | 4 + .../esql/functions/parameters/mv_zip.asciidoc | 10 ++ .../esql/functions/parameters/pi.asciidoc | 1 + .../esql/functions/parameters/pow.asciidoc | 7 ++ .../functions/parameters/replace.asciidoc | 10 ++ 
.../esql/functions/parameters/right.asciidoc | 7 ++ .../esql/functions/parameters/round.asciidoc | 7 ++ .../esql/functions/parameters/rtrim.asciidoc | 4 + .../esql/functions/parameters/sin.asciidoc | 4 + .../esql/functions/parameters/sinh.asciidoc | 4 + .../esql/functions/parameters/split.asciidoc | 7 ++ .../esql/functions/parameters/sqrt.asciidoc | 4 + .../esql/functions/parameters/st_x.asciidoc | 4 + .../esql/functions/parameters/st_y.asciidoc | 4 + .../functions/parameters/starts_with.asciidoc | 7 ++ .../functions/parameters/substring.asciidoc | 10 ++ .../esql/functions/parameters/tan.asciidoc | 4 + .../esql/functions/parameters/tanh.asciidoc | 4 + .../esql/functions/parameters/tau.asciidoc | 1 + .../functions/parameters/to_boolean.asciidoc | 4 + .../parameters/to_cartesianpoint.asciidoc | 4 + .../parameters/to_cartesianshape.asciidoc | 4 + .../functions/parameters/to_datetime.asciidoc | 4 + .../functions/parameters/to_degrees.asciidoc | 4 + .../functions/parameters/to_double.asciidoc | 4 + .../functions/parameters/to_geopoint.asciidoc | 4 + .../functions/parameters/to_geoshape.asciidoc | 4 + .../functions/parameters/to_integer.asciidoc | 4 + .../esql/functions/parameters/to_ip.asciidoc | 4 + .../functions/parameters/to_long.asciidoc | 4 + .../functions/parameters/to_lower.asciidoc | 4 + .../functions/parameters/to_radians.asciidoc | 4 + .../functions/parameters/to_string.asciidoc | 4 + .../parameters/to_unsigned_long.asciidoc | 4 + .../functions/parameters/to_upper.asciidoc | 4 + .../functions/parameters/to_version.asciidoc | 4 + .../esql/functions/parameters/trim.asciidoc | 4 + docs/reference/esql/functions/pow.asciidoc | 2 - .../reference/esql/functions/replace.asciidoc | 2 - docs/reference/esql/functions/right.asciidoc | 2 - docs/reference/esql/functions/round.asciidoc | 2 - docs/reference/esql/functions/rtrim.asciidoc | 2 - docs/reference/esql/functions/sin.asciidoc | 2 - docs/reference/esql/functions/sinh.asciidoc | 2 - 
docs/reference/esql/functions/split.asciidoc | 2 - docs/reference/esql/functions/sqrt.asciidoc | 2 - docs/reference/esql/functions/st_x.asciidoc | 2 - docs/reference/esql/functions/st_y.asciidoc | 2 - .../esql/functions/starts_with.asciidoc | 2 - .../esql/functions/string-functions.asciidoc | 2 +- .../esql/functions/substring.asciidoc | 2 - docs/reference/esql/functions/tan.asciidoc | 2 - docs/reference/esql/functions/tanh.asciidoc | 2 - .../esql/functions/to_cartesianpoint.asciidoc | 2 - .../esql/functions/to_cartesianshape.asciidoc | 2 - .../esql/functions/to_geopoint.asciidoc | 2 - .../esql/functions/to_geoshape.asciidoc | 2 - .../esql/functions/to_lower.asciidoc | 2 - .../esql/functions/to_string.asciidoc | 2 - .../esql/functions/to_upper.asciidoc | 2 - .../esql/functions/to_version.asciidoc | 2 - docs/reference/esql/functions/trim.asciidoc | 2 - .../esql/functions/types/abs.asciidoc | 4 + .../esql/functions/types/acos.asciidoc | 4 + .../esql/functions/types/add.asciidoc | 4 + .../esql/functions/types/asin.asciidoc | 4 + .../esql/functions/types/atan.asciidoc | 4 + .../esql/functions/types/atan2.asciidoc | 4 + .../esql/functions/types/auto_bucket.asciidoc | 4 + .../esql/functions/types/case.asciidoc | 4 + .../esql/functions/types/ceil.asciidoc | 4 + .../esql/functions/types/coalesce.asciidoc | 4 + .../esql/functions/types/concat.asciidoc | 4 + .../esql/functions/types/cos.asciidoc | 4 + .../esql/functions/types/cosh.asciidoc | 4 + .../esql/functions/types/date_diff.asciidoc | 4 + .../functions/types/date_extract.asciidoc | 4 + .../esql/functions/types/date_parse.asciidoc | 4 + .../esql/functions/types/div.asciidoc | 4 + .../reference/esql/functions/types/e.asciidoc | 4 + .../esql/functions/types/ends_with.asciidoc | 4 + .../esql/functions/types/equals.asciidoc | 4 + .../esql/functions/types/floor.asciidoc | 4 + .../functions/types/greater_than.asciidoc | 4 + .../types/greater_than_or_equal.asciidoc | 4 + .../esql/functions/types/greatest.asciidoc | 4 + 
.../esql/functions/types/least.asciidoc | 4 + .../esql/functions/types/left.asciidoc | 4 + .../esql/functions/types/length.asciidoc | 4 + .../esql/functions/types/less_than.asciidoc | 4 + .../types/less_than_or_equal.asciidoc | 4 + .../esql/functions/types/log.asciidoc | 4 + .../esql/functions/types/log10.asciidoc | 4 + .../esql/functions/types/ltrim.asciidoc | 4 + .../esql/functions/types/mod.asciidoc | 4 + .../esql/functions/types/mul.asciidoc | 4 + .../esql/functions/types/mv_avg.asciidoc | 4 + .../esql/functions/types/mv_concat.asciidoc | 4 + .../esql/functions/types/mv_count.asciidoc | 4 + .../esql/functions/types/mv_dedupe.asciidoc | 4 + .../esql/functions/types/mv_first.asciidoc | 4 + .../esql/functions/types/mv_last.asciidoc | 4 + .../esql/functions/types/mv_max.asciidoc | 4 + .../esql/functions/types/mv_median.asciidoc | 4 + .../esql/functions/types/mv_min.asciidoc | 4 + .../esql/functions/types/mv_slice.asciidoc | 4 + .../esql/functions/types/mv_sort.asciidoc | 4 + .../esql/functions/types/mv_sum.asciidoc | 4 + .../esql/functions/types/mv_zip.asciidoc | 4 + .../esql/functions/types/neg.asciidoc | 4 + .../esql/functions/types/not_equals.asciidoc | 4 + .../esql/functions/types/pi.asciidoc | 4 + .../esql/functions/types/pow.asciidoc | 4 + .../esql/functions/types/replace.asciidoc | 4 + .../esql/functions/types/right.asciidoc | 4 + .../esql/functions/types/round.asciidoc | 4 + .../esql/functions/types/rtrim.asciidoc | 4 + .../esql/functions/types/sin.asciidoc | 4 + .../esql/functions/types/sinh.asciidoc | 4 + .../esql/functions/types/split.asciidoc | 4 + .../esql/functions/types/sqrt.asciidoc | 4 + .../esql/functions/types/st_x.asciidoc | 4 + .../esql/functions/types/st_y.asciidoc | 4 + .../esql/functions/types/starts_with.asciidoc | 4 + .../esql/functions/types/sub.asciidoc | 4 + .../esql/functions/types/substring.asciidoc | 4 + .../esql/functions/types/tan.asciidoc | 4 + .../esql/functions/types/tanh.asciidoc | 4 + .../esql/functions/types/tau.asciidoc | 4 
+ .../esql/functions/types/to_boolean.asciidoc | 4 + .../types/to_cartesianpoint.asciidoc | 4 + .../types/to_cartesianshape.asciidoc | 4 + .../esql/functions/types/to_datetime.asciidoc | 4 + .../esql/functions/types/to_degrees.asciidoc | 4 + .../esql/functions/types/to_double.asciidoc | 4 + .../esql/functions/types/to_geopoint.asciidoc | 4 + .../esql/functions/types/to_geoshape.asciidoc | 4 + .../esql/functions/types/to_integer.asciidoc | 4 + .../esql/functions/types/to_ip.asciidoc | 4 + .../esql/functions/types/to_long.asciidoc | 4 + .../esql/functions/types/to_lower.asciidoc | 4 + .../esql/functions/types/to_radians.asciidoc | 4 + .../esql/functions/types/to_string.asciidoc | 4 + .../functions/types/to_unsigned_long.asciidoc | 4 + .../esql/functions/types/to_upper.asciidoc | 4 + .../esql/functions/types/to_version.asciidoc | 4 + .../esql/functions/types/trim.asciidoc | 4 + .../src/main/resources/meta.csv-spec | 4 +- .../function/EsqlFunctionRegistry.java | 22 ++++- .../esql/expression/function/Example.java | 30 ++++++ .../expression/function/FunctionInfo.java | 5 + .../function/scalar/nulls/Coalesce.java | 4 +- .../function/scalar/string/Left.java | 8 +- .../function/AbstractFunctionTestCase.java | 97 +++++++++++++++++-- 370 files changed, 2295 insertions(+), 202 deletions(-) create mode 100644 docs/reference/esql/functions/description/abs.asciidoc create mode 100644 docs/reference/esql/functions/description/acos.asciidoc create mode 100644 docs/reference/esql/functions/description/asin.asciidoc create mode 100644 docs/reference/esql/functions/description/atan.asciidoc create mode 100644 docs/reference/esql/functions/description/atan2.asciidoc create mode 100644 docs/reference/esql/functions/description/auto_bucket.asciidoc create mode 100644 docs/reference/esql/functions/description/case.asciidoc create mode 100644 docs/reference/esql/functions/description/ceil.asciidoc create mode 100644 docs/reference/esql/functions/description/coalesce.asciidoc create mode 
100644 docs/reference/esql/functions/description/concat.asciidoc create mode 100644 docs/reference/esql/functions/description/cos.asciidoc create mode 100644 docs/reference/esql/functions/description/cosh.asciidoc create mode 100644 docs/reference/esql/functions/description/date_diff.asciidoc create mode 100644 docs/reference/esql/functions/description/date_extract.asciidoc create mode 100644 docs/reference/esql/functions/description/date_parse.asciidoc create mode 100644 docs/reference/esql/functions/description/e.asciidoc create mode 100644 docs/reference/esql/functions/description/ends_with.asciidoc create mode 100644 docs/reference/esql/functions/description/floor.asciidoc create mode 100644 docs/reference/esql/functions/description/greatest.asciidoc create mode 100644 docs/reference/esql/functions/description/least.asciidoc create mode 100644 docs/reference/esql/functions/description/left.asciidoc create mode 100644 docs/reference/esql/functions/description/length.asciidoc create mode 100644 docs/reference/esql/functions/description/log.asciidoc create mode 100644 docs/reference/esql/functions/description/log10.asciidoc create mode 100644 docs/reference/esql/functions/description/ltrim.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_avg.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_concat.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_count.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_dedupe.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_first.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_last.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_max.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_median.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_min.asciidoc create mode 100644 
docs/reference/esql/functions/description/mv_slice.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_sort.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_sum.asciidoc create mode 100644 docs/reference/esql/functions/description/mv_zip.asciidoc create mode 100644 docs/reference/esql/functions/description/pi.asciidoc create mode 100644 docs/reference/esql/functions/description/pow.asciidoc create mode 100644 docs/reference/esql/functions/description/replace.asciidoc create mode 100644 docs/reference/esql/functions/description/right.asciidoc create mode 100644 docs/reference/esql/functions/description/round.asciidoc create mode 100644 docs/reference/esql/functions/description/rtrim.asciidoc create mode 100644 docs/reference/esql/functions/description/sin.asciidoc create mode 100644 docs/reference/esql/functions/description/sinh.asciidoc create mode 100644 docs/reference/esql/functions/description/split.asciidoc create mode 100644 docs/reference/esql/functions/description/sqrt.asciidoc create mode 100644 docs/reference/esql/functions/description/st_x.asciidoc create mode 100644 docs/reference/esql/functions/description/st_y.asciidoc create mode 100644 docs/reference/esql/functions/description/starts_with.asciidoc create mode 100644 docs/reference/esql/functions/description/substring.asciidoc create mode 100644 docs/reference/esql/functions/description/tan.asciidoc create mode 100644 docs/reference/esql/functions/description/tanh.asciidoc create mode 100644 docs/reference/esql/functions/description/tau.asciidoc create mode 100644 docs/reference/esql/functions/description/to_boolean.asciidoc create mode 100644 docs/reference/esql/functions/description/to_cartesianpoint.asciidoc create mode 100644 docs/reference/esql/functions/description/to_cartesianshape.asciidoc create mode 100644 docs/reference/esql/functions/description/to_datetime.asciidoc create mode 100644 docs/reference/esql/functions/description/to_degrees.asciidoc 
create mode 100644 docs/reference/esql/functions/description/to_double.asciidoc create mode 100644 docs/reference/esql/functions/description/to_geopoint.asciidoc create mode 100644 docs/reference/esql/functions/description/to_geoshape.asciidoc create mode 100644 docs/reference/esql/functions/description/to_integer.asciidoc create mode 100644 docs/reference/esql/functions/description/to_ip.asciidoc create mode 100644 docs/reference/esql/functions/description/to_long.asciidoc create mode 100644 docs/reference/esql/functions/description/to_lower.asciidoc create mode 100644 docs/reference/esql/functions/description/to_radians.asciidoc create mode 100644 docs/reference/esql/functions/description/to_string.asciidoc create mode 100644 docs/reference/esql/functions/description/to_unsigned_long.asciidoc create mode 100644 docs/reference/esql/functions/description/to_upper.asciidoc create mode 100644 docs/reference/esql/functions/description/to_version.asciidoc create mode 100644 docs/reference/esql/functions/description/trim.asciidoc create mode 100644 docs/reference/esql/functions/examples/coalesce.asciidoc create mode 100644 docs/reference/esql/functions/examples/left.asciidoc create mode 100644 docs/reference/esql/functions/layout/abs.asciidoc create mode 100644 docs/reference/esql/functions/layout/acos.asciidoc create mode 100644 docs/reference/esql/functions/layout/asin.asciidoc create mode 100644 docs/reference/esql/functions/layout/atan.asciidoc create mode 100644 docs/reference/esql/functions/layout/atan2.asciidoc create mode 100644 docs/reference/esql/functions/layout/auto_bucket.asciidoc create mode 100644 docs/reference/esql/functions/layout/case.asciidoc create mode 100644 docs/reference/esql/functions/layout/ceil.asciidoc create mode 100644 docs/reference/esql/functions/layout/coalesce.asciidoc create mode 100644 docs/reference/esql/functions/layout/concat.asciidoc create mode 100644 docs/reference/esql/functions/layout/cos.asciidoc create mode 100644 
docs/reference/esql/functions/layout/cosh.asciidoc create mode 100644 docs/reference/esql/functions/layout/date_diff.asciidoc create mode 100644 docs/reference/esql/functions/layout/date_extract.asciidoc create mode 100644 docs/reference/esql/functions/layout/date_parse.asciidoc create mode 100644 docs/reference/esql/functions/layout/e.asciidoc create mode 100644 docs/reference/esql/functions/layout/ends_with.asciidoc create mode 100644 docs/reference/esql/functions/layout/floor.asciidoc create mode 100644 docs/reference/esql/functions/layout/greatest.asciidoc create mode 100644 docs/reference/esql/functions/layout/least.asciidoc create mode 100644 docs/reference/esql/functions/layout/left.asciidoc create mode 100644 docs/reference/esql/functions/layout/length.asciidoc create mode 100644 docs/reference/esql/functions/layout/log.asciidoc create mode 100644 docs/reference/esql/functions/layout/log10.asciidoc create mode 100644 docs/reference/esql/functions/layout/ltrim.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_avg.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_concat.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_count.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_dedupe.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_first.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_last.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_max.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_median.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_min.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_slice.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_sort.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_sum.asciidoc create mode 100644 docs/reference/esql/functions/layout/mv_zip.asciidoc create mode 100644 
docs/reference/esql/functions/layout/pi.asciidoc create mode 100644 docs/reference/esql/functions/layout/pow.asciidoc create mode 100644 docs/reference/esql/functions/layout/replace.asciidoc create mode 100644 docs/reference/esql/functions/layout/right.asciidoc create mode 100644 docs/reference/esql/functions/layout/round.asciidoc create mode 100644 docs/reference/esql/functions/layout/rtrim.asciidoc create mode 100644 docs/reference/esql/functions/layout/sin.asciidoc create mode 100644 docs/reference/esql/functions/layout/sinh.asciidoc create mode 100644 docs/reference/esql/functions/layout/split.asciidoc create mode 100644 docs/reference/esql/functions/layout/sqrt.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_x.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_y.asciidoc create mode 100644 docs/reference/esql/functions/layout/starts_with.asciidoc create mode 100644 docs/reference/esql/functions/layout/substring.asciidoc create mode 100644 docs/reference/esql/functions/layout/tan.asciidoc create mode 100644 docs/reference/esql/functions/layout/tanh.asciidoc create mode 100644 docs/reference/esql/functions/layout/tau.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_boolean.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_cartesianshape.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_datetime.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_degrees.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_double.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_geopoint.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_geoshape.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_integer.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_ip.asciidoc create mode 100644 
docs/reference/esql/functions/layout/to_long.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_lower.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_radians.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_string.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_unsigned_long.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_upper.asciidoc create mode 100644 docs/reference/esql/functions/layout/to_version.asciidoc create mode 100644 docs/reference/esql/functions/layout/trim.asciidoc delete mode 100644 docs/reference/esql/functions/left.asciidoc create mode 100644 docs/reference/esql/functions/parameters/abs.asciidoc create mode 100644 docs/reference/esql/functions/parameters/acos.asciidoc create mode 100644 docs/reference/esql/functions/parameters/asin.asciidoc create mode 100644 docs/reference/esql/functions/parameters/atan.asciidoc create mode 100644 docs/reference/esql/functions/parameters/atan2.asciidoc create mode 100644 docs/reference/esql/functions/parameters/auto_bucket.asciidoc create mode 100644 docs/reference/esql/functions/parameters/case.asciidoc create mode 100644 docs/reference/esql/functions/parameters/ceil.asciidoc create mode 100644 docs/reference/esql/functions/parameters/coalesce.asciidoc create mode 100644 docs/reference/esql/functions/parameters/concat.asciidoc create mode 100644 docs/reference/esql/functions/parameters/cos.asciidoc create mode 100644 docs/reference/esql/functions/parameters/cosh.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_diff.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_extract.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_parse.asciidoc create mode 100644 docs/reference/esql/functions/parameters/e.asciidoc create mode 100644 docs/reference/esql/functions/parameters/ends_with.asciidoc create mode 100644 
docs/reference/esql/functions/parameters/floor.asciidoc create mode 100644 docs/reference/esql/functions/parameters/greatest.asciidoc create mode 100644 docs/reference/esql/functions/parameters/least.asciidoc create mode 100644 docs/reference/esql/functions/parameters/left.asciidoc create mode 100644 docs/reference/esql/functions/parameters/length.asciidoc create mode 100644 docs/reference/esql/functions/parameters/log.asciidoc create mode 100644 docs/reference/esql/functions/parameters/log10.asciidoc create mode 100644 docs/reference/esql/functions/parameters/ltrim.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_avg.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_concat.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_count.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_dedupe.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_first.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_last.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_max.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_median.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_min.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_slice.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_sort.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_sum.asciidoc create mode 100644 docs/reference/esql/functions/parameters/mv_zip.asciidoc create mode 100644 docs/reference/esql/functions/parameters/pi.asciidoc create mode 100644 docs/reference/esql/functions/parameters/pow.asciidoc create mode 100644 docs/reference/esql/functions/parameters/replace.asciidoc create mode 100644 docs/reference/esql/functions/parameters/right.asciidoc create mode 100644 docs/reference/esql/functions/parameters/round.asciidoc create mode 100644 
docs/reference/esql/functions/parameters/rtrim.asciidoc create mode 100644 docs/reference/esql/functions/parameters/sin.asciidoc create mode 100644 docs/reference/esql/functions/parameters/sinh.asciidoc create mode 100644 docs/reference/esql/functions/parameters/split.asciidoc create mode 100644 docs/reference/esql/functions/parameters/sqrt.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_x.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_y.asciidoc create mode 100644 docs/reference/esql/functions/parameters/starts_with.asciidoc create mode 100644 docs/reference/esql/functions/parameters/substring.asciidoc create mode 100644 docs/reference/esql/functions/parameters/tan.asciidoc create mode 100644 docs/reference/esql/functions/parameters/tanh.asciidoc create mode 100644 docs/reference/esql/functions/parameters/tau.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_boolean.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_datetime.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_degrees.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_double.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_geopoint.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_geoshape.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_integer.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_ip.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_long.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_lower.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_radians.asciidoc create mode 100644 
docs/reference/esql/functions/parameters/to_string.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_upper.asciidoc create mode 100644 docs/reference/esql/functions/parameters/to_version.asciidoc create mode 100644 docs/reference/esql/functions/parameters/trim.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java diff --git a/docs/reference/esql/functions/abs.asciidoc b/docs/reference/esql/functions/abs.asciidoc index 32b49bc287a83..4913d3219f0ee 100644 --- a/docs/reference/esql/functions/abs.asciidoc +++ b/docs/reference/esql/functions/abs.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the absolute value. -*Supported types* - include::types/abs.asciidoc[] *Examples* @@ -38,4 +36,4 @@ include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees-result] -|=== \ No newline at end of file +|=== diff --git a/docs/reference/esql/functions/acos.asciidoc b/docs/reference/esql/functions/acos.asciidoc index e4d04bd169c78..9be03f830bbd7 100644 --- a/docs/reference/esql/functions/acos.asciidoc +++ b/docs/reference/esql/functions/acos.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. -*Supported types* - include::types/acos.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc index a326852e9b016..3c97a89435d73 100644 --- a/docs/reference/esql/functions/asin.asciidoc +++ b/docs/reference/esql/functions/asin.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. 
Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. -*Supported types* - include::types/asin.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc index 604fc4d0bbecc..a662814cfe56c 100644 --- a/docs/reference/esql/functions/atan.asciidoc +++ b/docs/reference/esql/functions/atan.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. -*Supported types* - include::types/atan.asciidoc[] *Example* @@ -30,4 +28,4 @@ include::{esql-specs}/floats.csv-spec[tag=atan] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/floats.csv-spec[tag=atan-result] -|=== \ No newline at end of file +|=== diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc index 1920b4b7ac1a0..cc28b46339012 100644 --- a/docs/reference/esql/functions/atan2.asciidoc +++ b/docs/reference/esql/functions/atan2.asciidoc @@ -20,8 +20,6 @@ Numeric expression. If `null`, the function returns `null`. The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. 
-*Supported types* - include::types/atan2.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 2d4daa6ad2eca..431efab1c924a 100644 --- a/docs/reference/esql/functions/binary.asciidoc +++ b/docs/reference/esql/functions/binary.asciidoc @@ -23,70 +23,52 @@ include::types/not_equals.asciidoc[] [.text-center] image::esql/functions/signature/less_than.svg[Embedded,opts=inline] -Supported types: - include::types/less_than.asciidoc[] ==== Less than or equal to `<=` [.text-center] image::esql/functions/signature/less_than_or_equal.svg[Embedded,opts=inline] -Supported types: - include::types/less_than_or_equal.asciidoc[] ==== Greater than `>` [.text-center] image::esql/functions/signature/greater_than.svg[Embedded,opts=inline] -Supported types: - include::types/greater_than.asciidoc[] ==== Greater than or equal to `>=` [.text-center] image::esql/functions/signature/greater_than_or_equal.svg[Embedded,opts=inline] -Supported types: - include::types/greater_than_or_equal.asciidoc[] ==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline] -Supported types: - include::types/add.asciidoc[] ==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline] -Supported types: - include::types/sub.asciidoc[] ==== Multiply `*` [.text-center] image::esql/functions/signature/mul.svg[Embedded,opts=inline] -Supported types: - include::types/mul.asciidoc[] ==== Divide `/` [.text-center] image::esql/functions/signature/div.svg[Embedded,opts=inline] -Supported types: - include::types/div.asciidoc[] ==== Modulus `%` [.text-center] image::esql/functions/signature/mod.svg[Embedded,opts=inline] -Supported types: - include::types/mod.asciidoc[] diff --git a/docs/reference/esql/functions/ceil.asciidoc b/docs/reference/esql/functions/ceil.asciidoc index bc132e6bf47e6..ab163138821b1 100644 --- a/docs/reference/esql/functions/ceil.asciidoc +++ 
b/docs/reference/esql/functions/ceil.asciidoc @@ -17,11 +17,9 @@ Numeric expression. If `null`, the function returns `null`. Round a number up to the nearest integer. NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer + For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. -*Supported types* - include::types/ceil.asciidoc[] diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc index 1121a75209151..2d8c0f379c82e 100644 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ b/docs/reference/esql/functions/coalesce.asciidoc @@ -8,24 +8,6 @@ ---- COALESCE(expression1 [, ..., expressionN]) ---- - -*Parameters* - -`expressionX`:: -Expression to evaluate. - -*Description* - -Returns the first of its arguments that is not null. If all arguments are null, -it returns `null`. - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/null.csv-spec[tag=coalesce] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/null.csv-spec[tag=coalesce-result] -|=== +include::parameters/coalesce.asciidoc[] +include::description/coalesce.asciidoc[] +include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc index a5a0251bbd70a..7fa1d973c86b6 100644 --- a/docs/reference/esql/functions/cos.asciidoc +++ b/docs/reference/esql/functions/cos.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Sine_and_cosine[cosine] of `n`. Input expected in radians. 
-*Supported types* - include::types/cos.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/cosh.asciidoc b/docs/reference/esql/functions/cosh.asciidoc index 5883bc4b9d0c4..252064b60e13f 100644 --- a/docs/reference/esql/functions/cosh.asciidoc +++ b/docs/reference/esql/functions/cosh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine]. -*Supported types* - include::types/cosh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/date_diff.asciidoc b/docs/reference/esql/functions/date_diff.asciidoc index fa51e6f906110..0d24da9069250 100644 --- a/docs/reference/esql/functions/date_diff.asciidoc +++ b/docs/reference/esql/functions/date_diff.asciidoc @@ -46,8 +46,6 @@ s|abbreviations | nanosecond | nanoseconds, ns |=== -*Supported types* - include::types/date_diff.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/description/abs.asciidoc b/docs/reference/esql/functions/description/abs.asciidoc new file mode 100644 index 0000000000000..b2c765547cac3 --- /dev/null +++ b/docs/reference/esql/functions/description/abs.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the absolute value. diff --git a/docs/reference/esql/functions/description/acos.asciidoc b/docs/reference/esql/functions/description/acos.asciidoc new file mode 100644 index 0000000000000..8b539c7b377c9 --- /dev/null +++ b/docs/reference/esql/functions/description/acos.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +The arccosine of an angle, expressed in radians. 
diff --git a/docs/reference/esql/functions/description/asin.asciidoc b/docs/reference/esql/functions/description/asin.asciidoc new file mode 100644 index 0000000000000..7fdfde219cac2 --- /dev/null +++ b/docs/reference/esql/functions/description/asin.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Inverse sine trigonometric function. diff --git a/docs/reference/esql/functions/description/atan.asciidoc b/docs/reference/esql/functions/description/atan.asciidoc new file mode 100644 index 0000000000000..f1e4c3fe90a9c --- /dev/null +++ b/docs/reference/esql/functions/description/atan.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Inverse tangent trigonometric function. diff --git a/docs/reference/esql/functions/description/atan2.asciidoc b/docs/reference/esql/functions/description/atan2.asciidoc new file mode 100644 index 0000000000000..8642f404fc2cb --- /dev/null +++ b/docs/reference/esql/functions/description/atan2.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane. diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/auto_bucket.asciidoc new file mode 100644 index 0000000000000..2be3aa8943e97 --- /dev/null +++ b/docs/reference/esql/functions/description/auto_bucket.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into. 
diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc new file mode 100644 index 0000000000000..7deb8566a630f --- /dev/null +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. diff --git a/docs/reference/esql/functions/description/ceil.asciidoc b/docs/reference/esql/functions/description/ceil.asciidoc new file mode 100644 index 0000000000000..ed7ef6b581950 --- /dev/null +++ b/docs/reference/esql/functions/description/ceil.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Round a number up to the nearest integer. diff --git a/docs/reference/esql/functions/description/coalesce.asciidoc b/docs/reference/esql/functions/description/coalesce.asciidoc new file mode 100644 index 0000000000000..3cab2bac462b7 --- /dev/null +++ b/docs/reference/esql/functions/description/coalesce.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. diff --git a/docs/reference/esql/functions/description/concat.asciidoc b/docs/reference/esql/functions/description/concat.asciidoc new file mode 100644 index 0000000000000..4523b8eb166cd --- /dev/null +++ b/docs/reference/esql/functions/description/concat.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Concatenates two or more strings. 
diff --git a/docs/reference/esql/functions/description/cos.asciidoc b/docs/reference/esql/functions/description/cos.asciidoc new file mode 100644 index 0000000000000..394ff038b112f --- /dev/null +++ b/docs/reference/esql/functions/description/cos.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the trigonometric cosine of an angle diff --git a/docs/reference/esql/functions/description/cosh.asciidoc b/docs/reference/esql/functions/description/cosh.asciidoc new file mode 100644 index 0000000000000..50036dd1a1c18 --- /dev/null +++ b/docs/reference/esql/functions/description/cosh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the hyperbolic cosine of a number diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc new file mode 100644 index 0000000000000..53759a8c56158 --- /dev/null +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument diff --git a/docs/reference/esql/functions/description/date_extract.asciidoc b/docs/reference/esql/functions/description/date_extract.asciidoc new file mode 100644 index 0000000000000..e9f13fdba89a8 --- /dev/null +++ b/docs/reference/esql/functions/description/date_extract.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Extracts parts of a date, like year, month, day, hour. 
diff --git a/docs/reference/esql/functions/description/date_parse.asciidoc b/docs/reference/esql/functions/description/date_parse.asciidoc new file mode 100644 index 0000000000000..75eb2490cff93 --- /dev/null +++ b/docs/reference/esql/functions/description/date_parse.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Parses a string into a date value diff --git a/docs/reference/esql/functions/description/e.asciidoc b/docs/reference/esql/functions/description/e.asciidoc new file mode 100644 index 0000000000000..b6804212124c9 --- /dev/null +++ b/docs/reference/esql/functions/description/e.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Euler’s number. diff --git a/docs/reference/esql/functions/description/ends_with.asciidoc b/docs/reference/esql/functions/description/ends_with.asciidoc new file mode 100644 index 0000000000000..93b8873fbf56f --- /dev/null +++ b/docs/reference/esql/functions/description/ends_with.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns a boolean that indicates whether a keyword string ends with another string diff --git a/docs/reference/esql/functions/description/floor.asciidoc b/docs/reference/esql/functions/description/floor.asciidoc new file mode 100644 index 0000000000000..ecea344dd35cd --- /dev/null +++ b/docs/reference/esql/functions/description/floor.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Round a number down to the nearest integer.
diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc new file mode 100644 index 0000000000000..86debd63f4ff9 --- /dev/null +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the maximum value from many columns. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc new file mode 100644 index 0000000000000..6f403e9cb8574 --- /dev/null +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the minimum value from many columns. diff --git a/docs/reference/esql/functions/description/left.asciidoc b/docs/reference/esql/functions/description/left.asciidoc new file mode 100644 index 0000000000000..0c06cdf5cb5e9 --- /dev/null +++ b/docs/reference/esql/functions/description/left.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the substring that extracts 'length' chars from 'str' starting from the left. diff --git a/docs/reference/esql/functions/description/length.asciidoc b/docs/reference/esql/functions/description/length.asciidoc new file mode 100644 index 0000000000000..4c97428cdf8a0 --- /dev/null +++ b/docs/reference/esql/functions/description/length.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the character length of a string. 
diff --git a/docs/reference/esql/functions/description/log.asciidoc b/docs/reference/esql/functions/description/log.asciidoc new file mode 100644 index 0000000000000..85184be160fec --- /dev/null +++ b/docs/reference/esql/functions/description/log.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the logarithm of a value to a base. diff --git a/docs/reference/esql/functions/description/log10.asciidoc b/docs/reference/esql/functions/description/log10.asciidoc new file mode 100644 index 0000000000000..f9c98e1416971 --- /dev/null +++ b/docs/reference/esql/functions/description/log10.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the log base 10. diff --git a/docs/reference/esql/functions/description/ltrim.asciidoc b/docs/reference/esql/functions/description/ltrim.asciidoc new file mode 100644 index 0000000000000..163c7629bacea --- /dev/null +++ b/docs/reference/esql/functions/description/ltrim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Removes leading whitespaces from a string. diff --git a/docs/reference/esql/functions/description/mv_avg.asciidoc b/docs/reference/esql/functions/description/mv_avg.asciidoc new file mode 100644 index 0000000000000..d02992e80f1fd --- /dev/null +++ b/docs/reference/esql/functions/description/mv_avg.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a multivalued field into a single valued field containing the average of all of the values. 
diff --git a/docs/reference/esql/functions/description/mv_concat.asciidoc b/docs/reference/esql/functions/description/mv_concat.asciidoc new file mode 100644 index 0000000000000..d59c1ae9dea3e --- /dev/null +++ b/docs/reference/esql/functions/description/mv_concat.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued string field to a single valued field by concatenating all values. diff --git a/docs/reference/esql/functions/description/mv_count.asciidoc b/docs/reference/esql/functions/description/mv_count.asciidoc new file mode 100644 index 0000000000000..107caf432de22 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_count.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued field to a single valued field containing the count of values. diff --git a/docs/reference/esql/functions/description/mv_dedupe.asciidoc b/docs/reference/esql/functions/description/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..6b8aeab1ae2b0 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_dedupe.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Remove duplicate values from a multivalued field. diff --git a/docs/reference/esql/functions/description/mv_first.asciidoc b/docs/reference/esql/functions/description/mv_first.asciidoc new file mode 100644 index 0000000000000..ce65b75d2d5e9 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_first.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued field to a single valued field containing the first value. 
diff --git a/docs/reference/esql/functions/description/mv_last.asciidoc b/docs/reference/esql/functions/description/mv_last.asciidoc new file mode 100644 index 0000000000000..77f9aa1501da5 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_last.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued field to a single valued field containing the last value. diff --git a/docs/reference/esql/functions/description/mv_max.asciidoc b/docs/reference/esql/functions/description/mv_max.asciidoc new file mode 100644 index 0000000000000..6564b9d554d73 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_max.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued field to a single valued field containing the maximum value. diff --git a/docs/reference/esql/functions/description/mv_median.asciidoc b/docs/reference/esql/functions/description/mv_median.asciidoc new file mode 100644 index 0000000000000..800d22d7b8186 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_median.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a multivalued field into a single valued field containing the median value. diff --git a/docs/reference/esql/functions/description/mv_min.asciidoc b/docs/reference/esql/functions/description/mv_min.asciidoc new file mode 100644 index 0000000000000..425c50d5a7eb6 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_min.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Reduce a multivalued field to a single valued field containing the minimum value. 
diff --git a/docs/reference/esql/functions/description/mv_slice.asciidoc b/docs/reference/esql/functions/description/mv_slice.asciidoc new file mode 100644 index 0000000000000..b2a59313f5f73 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_slice.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns a subset of the multivalued field using the start and end index values. diff --git a/docs/reference/esql/functions/description/mv_sort.asciidoc b/docs/reference/esql/functions/description/mv_sort.asciidoc new file mode 100644 index 0000000000000..aa9ba350b7619 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_sort.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Sorts a multivalued field in lexicographical order. diff --git a/docs/reference/esql/functions/description/mv_sum.asciidoc b/docs/reference/esql/functions/description/mv_sum.asciidoc new file mode 100644 index 0000000000000..8496d734278f4 --- /dev/null +++ b/docs/reference/esql/functions/description/mv_sum.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a multivalued field into a single valued field containing the sum of all of the values. diff --git a/docs/reference/esql/functions/description/mv_zip.asciidoc b/docs/reference/esql/functions/description/mv_zip.asciidoc new file mode 100644 index 0000000000000..de4b15a5e362f --- /dev/null +++ b/docs/reference/esql/functions/description/mv_zip.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Combines the values from two multivalued fields with a delimiter that joins them together. 
diff --git a/docs/reference/esql/functions/description/pi.asciidoc b/docs/reference/esql/functions/description/pi.asciidoc new file mode 100644 index 0000000000000..47be5503ba369 --- /dev/null +++ b/docs/reference/esql/functions/description/pi.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +The ratio of a circle’s circumference to its diameter. diff --git a/docs/reference/esql/functions/description/pow.asciidoc b/docs/reference/esql/functions/description/pow.asciidoc new file mode 100644 index 0000000000000..5a8f2563f1e33 --- /dev/null +++ b/docs/reference/esql/functions/description/pow.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns the value of a base raised to the power of an exponent. diff --git a/docs/reference/esql/functions/description/replace.asciidoc b/docs/reference/esql/functions/description/replace.asciidoc new file mode 100644 index 0000000000000..e8df184a4c0a2 --- /dev/null +++ b/docs/reference/esql/functions/description/replace.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +The function substitutes in the string any match of the regular expression with the replacement string. diff --git a/docs/reference/esql/functions/description/right.asciidoc b/docs/reference/esql/functions/description/right.asciidoc new file mode 100644 index 0000000000000..e14308df21547 --- /dev/null +++ b/docs/reference/esql/functions/description/right.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Return the substring that extracts length chars from the string starting from the right. 
diff --git a/docs/reference/esql/functions/description/round.asciidoc b/docs/reference/esql/functions/description/round.asciidoc new file mode 100644 index 0000000000000..1c3aefd487ac4 --- /dev/null +++ b/docs/reference/esql/functions/description/round.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Rounds a number to the closest number with the specified number of digits. diff --git a/docs/reference/esql/functions/description/rtrim.asciidoc b/docs/reference/esql/functions/description/rtrim.asciidoc new file mode 100644 index 0000000000000..3041427026375 --- /dev/null +++ b/docs/reference/esql/functions/description/rtrim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Removes trailing whitespaces from a string. diff --git a/docs/reference/esql/functions/description/sin.asciidoc b/docs/reference/esql/functions/description/sin.asciidoc new file mode 100644 index 0000000000000..0013fe5b17757 --- /dev/null +++ b/docs/reference/esql/functions/description/sin.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Returns the trigonometric sine of an angle. diff --git a/docs/reference/esql/functions/description/sinh.asciidoc b/docs/reference/esql/functions/description/sinh.asciidoc new file mode 100644 index 0000000000000..46385f454fd74 --- /dev/null +++ b/docs/reference/esql/functions/description/sinh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
+ +*Description* + +Returns the hyperbolic sine of a number. diff --git a/docs/reference/esql/functions/description/split.asciidoc b/docs/reference/esql/functions/description/split.asciidoc new file mode 100644 index 0000000000000..1344d34e8f7b7 --- /dev/null +++ b/docs/reference/esql/functions/description/split.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Split a single valued string into multiple strings. diff --git a/docs/reference/esql/functions/description/sqrt.asciidoc b/docs/reference/esql/functions/description/sqrt.asciidoc new file mode 100644 index 0000000000000..6e034a154bb8d --- /dev/null +++ b/docs/reference/esql/functions/description/sqrt.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Returns the square root of a number. diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc new file mode 100644 index 0000000000000..0e6ebc1a5ab63 --- /dev/null +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Extracts the x-coordinate from a point geometry. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc new file mode 100644 index 0000000000000..a90cdf5ecd2de --- /dev/null +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Extracts the y-coordinate from a point geometry. 
diff --git a/docs/reference/esql/functions/description/starts_with.asciidoc b/docs/reference/esql/functions/description/starts_with.asciidoc new file mode 100644 index 0000000000000..4d07dfae3046d --- /dev/null +++ b/docs/reference/esql/functions/description/starts_with.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Returns a boolean that indicates whether a keyword string starts with another string. diff --git a/docs/reference/esql/functions/description/substring.asciidoc b/docs/reference/esql/functions/description/substring.asciidoc new file mode 100644 index 0000000000000..7771777b37999 --- /dev/null +++ b/docs/reference/esql/functions/description/substring.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Returns a substring of a string, specified by a start position and an optional length. diff --git a/docs/reference/esql/functions/description/tan.asciidoc b/docs/reference/esql/functions/description/tan.asciidoc new file mode 100644 index 0000000000000..16a8093c9a824 --- /dev/null +++ b/docs/reference/esql/functions/description/tan.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Returns the trigonometric tangent of an angle. diff --git a/docs/reference/esql/functions/description/tanh.asciidoc b/docs/reference/esql/functions/description/tanh.asciidoc new file mode 100644 index 0000000000000..ba47db32fb371 --- /dev/null +++ b/docs/reference/esql/functions/description/tanh.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. 
+ +*Description* + +Returns the hyperbolic tangent of a number. diff --git a/docs/reference/esql/functions/description/tau.asciidoc b/docs/reference/esql/functions/description/tau.asciidoc new file mode 100644 index 0000000000000..5bcb250ed71d1 --- /dev/null +++ b/docs/reference/esql/functions/description/tau.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +The ratio of a circle’s circumference to its radius. diff --git a/docs/reference/esql/functions/description/to_boolean.asciidoc b/docs/reference/esql/functions/description/to_boolean.asciidoc new file mode 100644 index 0000000000000..49dc326c2260c --- /dev/null +++ b/docs/reference/esql/functions/description/to_boolean.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Converts an input value to a boolean value. diff --git a/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..39261615083d5 --- /dev/null +++ b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Converts an input value to a point value. diff --git a/docs/reference/esql/functions/description/to_cartesianshape.asciidoc b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..fa73652b3a4ae --- /dev/null +++ b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. + +*Description* + +Converts an input value to a shape value. 
diff --git a/docs/reference/esql/functions/description/to_datetime.asciidoc b/docs/reference/esql/functions/description/to_datetime.asciidoc new file mode 100644 index 0000000000000..39347f90def7f --- /dev/null +++ b/docs/reference/esql/functions/description/to_datetime.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to a date value. diff --git a/docs/reference/esql/functions/description/to_degrees.asciidoc b/docs/reference/esql/functions/description/to_degrees.asciidoc new file mode 100644 index 0000000000000..b5d0f2bf5054f --- /dev/null +++ b/docs/reference/esql/functions/description/to_degrees.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a number in radians to degrees. diff --git a/docs/reference/esql/functions/description/to_double.asciidoc b/docs/reference/esql/functions/description/to_double.asciidoc new file mode 100644 index 0000000000000..b4a8c4a6a5a7c --- /dev/null +++ b/docs/reference/esql/functions/description/to_double.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to a double value. diff --git a/docs/reference/esql/functions/description/to_geopoint.asciidoc b/docs/reference/esql/functions/description/to_geopoint.asciidoc new file mode 100644 index 0000000000000..a035c52e29cd9 --- /dev/null +++ b/docs/reference/esql/functions/description/to_geopoint.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to a geo_point value. 
diff --git a/docs/reference/esql/functions/description/to_geoshape.asciidoc b/docs/reference/esql/functions/description/to_geoshape.asciidoc new file mode 100644 index 0000000000000..bf6f21e908ab7 --- /dev/null +++ b/docs/reference/esql/functions/description/to_geoshape.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to a geo_shape value. diff --git a/docs/reference/esql/functions/description/to_integer.asciidoc b/docs/reference/esql/functions/description/to_integer.asciidoc new file mode 100644 index 0000000000000..f31ff152c1c73 --- /dev/null +++ b/docs/reference/esql/functions/description/to_integer.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to an integer value. diff --git a/docs/reference/esql/functions/description/to_ip.asciidoc b/docs/reference/esql/functions/description/to_ip.asciidoc new file mode 100644 index 0000000000000..720ae28d35848 --- /dev/null +++ b/docs/reference/esql/functions/description/to_ip.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input string to an IP value. diff --git a/docs/reference/esql/functions/description/to_long.asciidoc b/docs/reference/esql/functions/description/to_long.asciidoc new file mode 100644 index 0000000000000..86e6377edb7fe --- /dev/null +++ b/docs/reference/esql/functions/description/to_long.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to a long value. 
diff --git a/docs/reference/esql/functions/description/to_lower.asciidoc b/docs/reference/esql/functions/description/to_lower.asciidoc new file mode 100644 index 0000000000000..5d4285ffb40c2 --- /dev/null +++ b/docs/reference/esql/functions/description/to_lower.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns a new string representing the input string converted to lower case. diff --git a/docs/reference/esql/functions/description/to_radians.asciidoc b/docs/reference/esql/functions/description/to_radians.asciidoc new file mode 100644 index 0000000000000..b2c0a8c278f30 --- /dev/null +++ b/docs/reference/esql/functions/description/to_radians.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a number in degrees to radians. diff --git a/docs/reference/esql/functions/description/to_string.asciidoc b/docs/reference/esql/functions/description/to_string.asciidoc new file mode 100644 index 0000000000000..0a0be5ef7ed65 --- /dev/null +++ b/docs/reference/esql/functions/description/to_string.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts a field into a string. diff --git a/docs/reference/esql/functions/description/to_unsigned_long.asciidoc b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..cb98f90ad1ab1 --- /dev/null +++ b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input value to an unsigned long value. 
diff --git a/docs/reference/esql/functions/description/to_upper.asciidoc b/docs/reference/esql/functions/description/to_upper.asciidoc new file mode 100644 index 0000000000000..a1a4ac32b775e --- /dev/null +++ b/docs/reference/esql/functions/description/to_upper.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns a new string representing the input string converted to upper case. diff --git a/docs/reference/esql/functions/description/to_version.asciidoc b/docs/reference/esql/functions/description/to_version.asciidoc new file mode 100644 index 0000000000000..6d8112800d122 --- /dev/null +++ b/docs/reference/esql/functions/description/to_version.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Converts an input string to a version value. diff --git a/docs/reference/esql/functions/description/trim.asciidoc b/docs/reference/esql/functions/description/trim.asciidoc new file mode 100644 index 0000000000000..49e50173f5d04 --- /dev/null +++ b/docs/reference/esql/functions/description/trim.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Removes leading and trailing whitespaces from a string. diff --git a/docs/reference/esql/functions/ends_with.asciidoc b/docs/reference/esql/functions/ends_with.asciidoc index 49477996ada19..23ad8df65b8fd 100644 --- a/docs/reference/esql/functions/ends_with.asciidoc +++ b/docs/reference/esql/functions/ends_with.asciidoc @@ -20,8 +20,6 @@ String expression. If `null`, the function returns `null`. Returns a boolean that indicates whether a keyword string ends with another string. 
-*Supported types* - include::types/ends_with.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/examples/coalesce.asciidoc b/docs/reference/esql/functions/examples/coalesce.asciidoc new file mode 100644 index 0000000000000..b4ba51168fb8c --- /dev/null +++ b/docs/reference/esql/functions/examples/coalesce.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/null.csv-spec[tag=coalesce] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/null.csv-spec[tag=coalesce-result] +|=== + diff --git a/docs/reference/esql/functions/examples/left.asciidoc b/docs/reference/esql/functions/examples/left.asciidoc new file mode 100644 index 0000000000000..22ae9239c31f8 --- /dev/null +++ b/docs/reference/esql/functions/examples/left.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=left] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=left-result] +|=== + diff --git a/docs/reference/esql/functions/floor.asciidoc b/docs/reference/esql/functions/floor.asciidoc index 0730a87e595fd..69d8657d008b2 100644 --- a/docs/reference/esql/functions/floor.asciidoc +++ b/docs/reference/esql/functions/floor.asciidoc @@ -17,11 +17,9 @@ Numeric expression. If `null`, the function returns `null`. Rounds a number down to the nearest integer. NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer + For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#floor(double)[Math.floor]. 
-*Supported types* - include::types/floor.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc index b9fc114d39ec6..003f1f46e6db5 100644 --- a/docs/reference/esql/functions/greatest.asciidoc +++ b/docs/reference/esql/functions/greatest.asciidoc @@ -24,8 +24,6 @@ NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. -*Supported types* - include::types/greatest.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/layout/abs.asciidoc b/docs/reference/esql/functions/layout/abs.asciidoc new file mode 100644 index 0000000000000..672fe555e276b --- /dev/null +++ b/docs/reference/esql/functions/layout/abs.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-abs]] +=== `ABS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/abs.svg[Embedded,opts=inline] + +include::../parameters/abs.asciidoc[] +include::../description/abs.asciidoc[] +include::../types/abs.asciidoc[] diff --git a/docs/reference/esql/functions/layout/acos.asciidoc b/docs/reference/esql/functions/layout/acos.asciidoc new file mode 100644 index 0000000000000..427223c6f9bca --- /dev/null +++ b/docs/reference/esql/functions/layout/acos.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-acos]] +=== `ACOS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/acos.svg[Embedded,opts=inline] + +include::../parameters/acos.asciidoc[] +include::../description/acos.asciidoc[] +include::../types/acos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/asin.asciidoc b/docs/reference/esql/functions/layout/asin.asciidoc new file mode 100644 index 0000000000000..b4c0fff8995e6 --- /dev/null +++ b/docs/reference/esql/functions/layout/asin.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-asin]] +=== `ASIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/asin.svg[Embedded,opts=inline] + +include::../parameters/asin.asciidoc[] +include::../description/asin.asciidoc[] +include::../types/asin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan.asciidoc b/docs/reference/esql/functions/layout/atan.asciidoc new file mode 100644 index 0000000000000..933d40c7e1d96 --- /dev/null +++ b/docs/reference/esql/functions/layout/atan.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-atan]] +=== `ATAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/atan.svg[Embedded,opts=inline] + +include::../parameters/atan.asciidoc[] +include::../description/atan.asciidoc[] +include::../types/atan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan2.asciidoc b/docs/reference/esql/functions/layout/atan2.asciidoc new file mode 100644 index 0000000000000..9a2cfbfdf01d0 --- /dev/null +++ b/docs/reference/esql/functions/layout/atan2.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-atan2]] +=== `ATAN2` + +*Syntax* + +[.text-center] +image::esql/functions/signature/atan2.svg[Embedded,opts=inline] + +include::../parameters/atan2.asciidoc[] +include::../description/atan2.asciidoc[] +include::../types/atan2.asciidoc[] diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc new file mode 100644 index 0000000000000..64c6fbcd7b627 --- /dev/null +++ b/docs/reference/esql/functions/layout/auto_bucket.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-auto_bucket]] +=== `AUTO_BUCKET` + +*Syntax* + +[.text-center] +image::esql/functions/signature/auto_bucket.svg[Embedded,opts=inline] + +include::../parameters/auto_bucket.asciidoc[] +include::../description/auto_bucket.asciidoc[] +include::../types/auto_bucket.asciidoc[] diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc new file mode 100644 index 0000000000000..80ac137f50e08 --- /dev/null +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-case]] +=== `CASE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/case.svg[Embedded,opts=inline] + +include::../parameters/case.asciidoc[] +include::../description/case.asciidoc[] +include::../types/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ceil.asciidoc b/docs/reference/esql/functions/layout/ceil.asciidoc new file mode 100644 index 0000000000000..ebbf7b0adbced --- /dev/null +++ b/docs/reference/esql/functions/layout/ceil.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-ceil]] +=== `CEIL` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ceil.svg[Embedded,opts=inline] + +include::../parameters/ceil.asciidoc[] +include::../description/ceil.asciidoc[] +include::../types/ceil.asciidoc[] diff --git a/docs/reference/esql/functions/layout/coalesce.asciidoc b/docs/reference/esql/functions/layout/coalesce.asciidoc new file mode 100644 index 0000000000000..4ef3d3a682ea7 --- /dev/null +++ b/docs/reference/esql/functions/layout/coalesce.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-coalesce]] +=== `COALESCE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/coalesce.svg[Embedded,opts=inline] + +include::../parameters/coalesce.asciidoc[] +include::../description/coalesce.asciidoc[] +include::../types/coalesce.asciidoc[] +include::../examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/layout/concat.asciidoc b/docs/reference/esql/functions/layout/concat.asciidoc new file mode 100644 index 0000000000000..55dd8f8f49e1f --- /dev/null +++ b/docs/reference/esql/functions/layout/concat.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-concat]] +=== `CONCAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/concat.svg[Embedded,opts=inline] + +include::../parameters/concat.asciidoc[] +include::../description/concat.asciidoc[] +include::../types/concat.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cos.asciidoc b/docs/reference/esql/functions/layout/cos.asciidoc new file mode 100644 index 0000000000000..e781acead4015 --- /dev/null +++ b/docs/reference/esql/functions/layout/cos.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-cos]] +=== `COS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/cos.svg[Embedded,opts=inline] + +include::../parameters/cos.asciidoc[] +include::../description/cos.asciidoc[] +include::../types/cos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cosh.asciidoc b/docs/reference/esql/functions/layout/cosh.asciidoc new file mode 100644 index 0000000000000..27146ed18d629 --- /dev/null +++ b/docs/reference/esql/functions/layout/cosh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-cosh]] +=== `COSH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/cosh.svg[Embedded,opts=inline] + +include::../parameters/cosh.asciidoc[] +include::../description/cosh.asciidoc[] +include::../types/cosh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_diff.asciidoc b/docs/reference/esql/functions/layout/date_diff.asciidoc new file mode 100644 index 0000000000000..928fffd329960 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_diff.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-date_diff]] +=== `DATE_DIFF` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_diff.svg[Embedded,opts=inline] + +include::../parameters/date_diff.asciidoc[] +include::../description/date_diff.asciidoc[] +include::../types/date_diff.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_extract.asciidoc b/docs/reference/esql/functions/layout/date_extract.asciidoc new file mode 100644 index 0000000000000..419179af93621 --- /dev/null +++ b/docs/reference/esql/functions/layout/date_extract.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-date_extract]] +=== `DATE_EXTRACT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_extract.svg[Embedded,opts=inline] + +include::../parameters/date_extract.asciidoc[] +include::../description/date_extract.asciidoc[] +include::../types/date_extract.asciidoc[] diff --git a/docs/reference/esql/functions/layout/date_parse.asciidoc b/docs/reference/esql/functions/layout/date_parse.asciidoc new file mode 100644 index 0000000000000..2bb82cd47c59e --- /dev/null +++ b/docs/reference/esql/functions/layout/date_parse.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-date_parse]] +=== `DATE_PARSE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_parse.svg[Embedded,opts=inline] + +include::../parameters/date_parse.asciidoc[] +include::../description/date_parse.asciidoc[] +include::../types/date_parse.asciidoc[] diff --git a/docs/reference/esql/functions/layout/e.asciidoc b/docs/reference/esql/functions/layout/e.asciidoc new file mode 100644 index 0000000000000..89b1ad06a5f11 --- /dev/null +++ b/docs/reference/esql/functions/layout/e.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-e]] +=== `E` + +*Syntax* + +[.text-center] +image::esql/functions/signature/e.svg[Embedded,opts=inline] + +include::../parameters/e.asciidoc[] +include::../description/e.asciidoc[] +include::../types/e.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ends_with.asciidoc b/docs/reference/esql/functions/layout/ends_with.asciidoc new file mode 100644 index 0000000000000..85828298acef6 --- /dev/null +++ b/docs/reference/esql/functions/layout/ends_with.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-ends_with]] +=== `ENDS_WITH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ends_with.svg[Embedded,opts=inline] + +include::../parameters/ends_with.asciidoc[] +include::../description/ends_with.asciidoc[] +include::../types/ends_with.asciidoc[] diff --git a/docs/reference/esql/functions/layout/floor.asciidoc b/docs/reference/esql/functions/layout/floor.asciidoc new file mode 100644 index 0000000000000..e51fa1ac1524c --- /dev/null +++ b/docs/reference/esql/functions/layout/floor.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-floor]] +=== `FLOOR` + +*Syntax* + +[.text-center] +image::esql/functions/signature/floor.svg[Embedded,opts=inline] + +include::../parameters/floor.asciidoc[] +include::../description/floor.asciidoc[] +include::../types/floor.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc new file mode 100644 index 0000000000000..a2a54963354da --- /dev/null +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-greatest]] +=== `GREATEST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/greatest.svg[Embedded,opts=inline] + +include::../parameters/greatest.asciidoc[] +include::../description/greatest.asciidoc[] +include::../types/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc new file mode 100644 index 0000000000000..9a220289f3d44 --- /dev/null +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-least]] +=== `LEAST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/least.svg[Embedded,opts=inline] + +include::../parameters/least.asciidoc[] +include::../description/least.asciidoc[] +include::../types/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/left.asciidoc b/docs/reference/esql/functions/layout/left.asciidoc new file mode 100644 index 0000000000000..4e825abf3e50e --- /dev/null +++ b/docs/reference/esql/functions/layout/left.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-left]] +=== `LEFT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/left.svg[Embedded,opts=inline] + +include::../parameters/left.asciidoc[] +include::../description/left.asciidoc[] +include::../types/left.asciidoc[] +include::../examples/left.asciidoc[] diff --git a/docs/reference/esql/functions/layout/length.asciidoc b/docs/reference/esql/functions/layout/length.asciidoc new file mode 100644 index 0000000000000..b11b047caf2ed --- /dev/null +++ b/docs/reference/esql/functions/layout/length.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-length]] +=== `LENGTH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/length.svg[Embedded,opts=inline] + +include::../parameters/length.asciidoc[] +include::../description/length.asciidoc[] +include::../types/length.asciidoc[] diff --git a/docs/reference/esql/functions/layout/log.asciidoc b/docs/reference/esql/functions/layout/log.asciidoc new file mode 100644 index 0000000000000..4f0bb5b6527a3 --- /dev/null +++ b/docs/reference/esql/functions/layout/log.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-log]] +=== `LOG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/log.svg[Embedded,opts=inline] + +include::../parameters/log.asciidoc[] +include::../description/log.asciidoc[] +include::../types/log.asciidoc[] diff --git a/docs/reference/esql/functions/layout/log10.asciidoc b/docs/reference/esql/functions/layout/log10.asciidoc new file mode 100644 index 0000000000000..6732377e81fdd --- /dev/null +++ b/docs/reference/esql/functions/layout/log10.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-log10]] +=== `LOG10` + +*Syntax* + +[.text-center] +image::esql/functions/signature/log10.svg[Embedded,opts=inline] + +include::../parameters/log10.asciidoc[] +include::../description/log10.asciidoc[] +include::../types/log10.asciidoc[] diff --git a/docs/reference/esql/functions/layout/ltrim.asciidoc b/docs/reference/esql/functions/layout/ltrim.asciidoc new file mode 100644 index 0000000000000..a6c7bbe2330cb --- /dev/null +++ b/docs/reference/esql/functions/layout/ltrim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-ltrim]] +=== `LTRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/ltrim.svg[Embedded,opts=inline] + +include::../parameters/ltrim.asciidoc[] +include::../description/ltrim.asciidoc[] +include::../types/ltrim.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_avg.asciidoc b/docs/reference/esql/functions/layout/mv_avg.asciidoc new file mode 100644 index 0000000000000..0f0e49298c414 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_avg.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-mv_avg]] +=== `MV_AVG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_avg.svg[Embedded,opts=inline] + +include::../parameters/mv_avg.asciidoc[] +include::../description/mv_avg.asciidoc[] +include::../types/mv_avg.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_concat.asciidoc b/docs/reference/esql/functions/layout/mv_concat.asciidoc new file mode 100644 index 0000000000000..e5cfc2b1ba74c --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_concat.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_concat]] +=== `MV_CONCAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_concat.svg[Embedded,opts=inline] + +include::../parameters/mv_concat.asciidoc[] +include::../description/mv_concat.asciidoc[] +include::../types/mv_concat.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_count.asciidoc b/docs/reference/esql/functions/layout/mv_count.asciidoc new file mode 100644 index 0000000000000..a575452aa24df --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_count.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_count]] +=== `MV_COUNT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_count.svg[Embedded,opts=inline] + +include::../parameters/mv_count.asciidoc[] +include::../description/mv_count.asciidoc[] +include::../types/mv_count.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_dedupe.asciidoc b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..ed45c0d511e53 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-mv_dedupe]] +=== `MV_DEDUPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_dedupe.svg[Embedded,opts=inline] + +include::../parameters/mv_dedupe.asciidoc[] +include::../description/mv_dedupe.asciidoc[] +include::../types/mv_dedupe.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_first.asciidoc b/docs/reference/esql/functions/layout/mv_first.asciidoc new file mode 100644 index 0000000000000..6f6ed924c5496 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_first.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_first]] +=== `MV_FIRST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_first.svg[Embedded,opts=inline] + +include::../parameters/mv_first.asciidoc[] +include::../description/mv_first.asciidoc[] +include::../types/mv_first.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_last.asciidoc b/docs/reference/esql/functions/layout/mv_last.asciidoc new file mode 100644 index 0000000000000..6e65a3ebb17b4 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_last.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_last]] +=== `MV_LAST` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_last.svg[Embedded,opts=inline] + +include::../parameters/mv_last.asciidoc[] +include::../description/mv_last.asciidoc[] +include::../types/mv_last.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_max.asciidoc b/docs/reference/esql/functions/layout/mv_max.asciidoc new file mode 100644 index 0000000000000..c687d68fda8f1 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_max.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-mv_max]] +=== `MV_MAX` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_max.svg[Embedded,opts=inline] + +include::../parameters/mv_max.asciidoc[] +include::../description/mv_max.asciidoc[] +include::../types/mv_max.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_median.asciidoc b/docs/reference/esql/functions/layout/mv_median.asciidoc new file mode 100644 index 0000000000000..ad131ccbb6e53 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_median.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_median]] +=== `MV_MEDIAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_median.svg[Embedded,opts=inline] + +include::../parameters/mv_median.asciidoc[] +include::../description/mv_median.asciidoc[] +include::../types/mv_median.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_min.asciidoc b/docs/reference/esql/functions/layout/mv_min.asciidoc new file mode 100644 index 0000000000000..52a1a1ec6091d --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_min.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_min]] +=== `MV_MIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_min.svg[Embedded,opts=inline] + +include::../parameters/mv_min.asciidoc[] +include::../description/mv_min.asciidoc[] +include::../types/mv_min.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_slice.asciidoc b/docs/reference/esql/functions/layout/mv_slice.asciidoc new file mode 100644 index 0000000000000..a9dff4c77dd54 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_slice.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-mv_slice]] +=== `MV_SLICE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_slice.svg[Embedded,opts=inline] + +include::../parameters/mv_slice.asciidoc[] +include::../description/mv_slice.asciidoc[] +include::../types/mv_slice.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_sort.asciidoc b/docs/reference/esql/functions/layout/mv_sort.asciidoc new file mode 100644 index 0000000000000..d7822e6356106 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_sort.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_sort]] +=== `MV_SORT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_sort.svg[Embedded,opts=inline] + +include::../parameters/mv_sort.asciidoc[] +include::../description/mv_sort.asciidoc[] +include::../types/mv_sort.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_sum.asciidoc b/docs/reference/esql/functions/layout/mv_sum.asciidoc new file mode 100644 index 0000000000000..df0830c83a2eb --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_sum.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-mv_sum]] +=== `MV_SUM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_sum.svg[Embedded,opts=inline] + +include::../parameters/mv_sum.asciidoc[] +include::../description/mv_sum.asciidoc[] +include::../types/mv_sum.asciidoc[] diff --git a/docs/reference/esql/functions/layout/mv_zip.asciidoc b/docs/reference/esql/functions/layout/mv_zip.asciidoc new file mode 100644 index 0000000000000..8ce6c94dc6585 --- /dev/null +++ b/docs/reference/esql/functions/layout/mv_zip.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-mv_zip]] +=== `MV_ZIP` + +*Syntax* + +[.text-center] +image::esql/functions/signature/mv_zip.svg[Embedded,opts=inline] + +include::../parameters/mv_zip.asciidoc[] +include::../description/mv_zip.asciidoc[] +include::../types/mv_zip.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pi.asciidoc b/docs/reference/esql/functions/layout/pi.asciidoc new file mode 100644 index 0000000000000..fc6e549af9f4b --- /dev/null +++ b/docs/reference/esql/functions/layout/pi.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-pi]] +=== `PI` + +*Syntax* + +[.text-center] +image::esql/functions/signature/pi.svg[Embedded,opts=inline] + +include::../parameters/pi.asciidoc[] +include::../description/pi.asciidoc[] +include::../types/pi.asciidoc[] diff --git a/docs/reference/esql/functions/layout/pow.asciidoc b/docs/reference/esql/functions/layout/pow.asciidoc new file mode 100644 index 0000000000000..c91944acc66bc --- /dev/null +++ b/docs/reference/esql/functions/layout/pow.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-pow]] +=== `POW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/pow.svg[Embedded,opts=inline] + +include::../parameters/pow.asciidoc[] +include::../description/pow.asciidoc[] +include::../types/pow.asciidoc[] diff --git a/docs/reference/esql/functions/layout/replace.asciidoc b/docs/reference/esql/functions/layout/replace.asciidoc new file mode 100644 index 0000000000000..bd060aa4e6d0d --- /dev/null +++ b/docs/reference/esql/functions/layout/replace.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-replace]] +=== `REPLACE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/replace.svg[Embedded,opts=inline] + +include::../parameters/replace.asciidoc[] +include::../description/replace.asciidoc[] +include::../types/replace.asciidoc[] diff --git a/docs/reference/esql/functions/layout/right.asciidoc b/docs/reference/esql/functions/layout/right.asciidoc new file mode 100644 index 0000000000000..d8af40a3e9b19 --- /dev/null +++ b/docs/reference/esql/functions/layout/right.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-right]] +=== `RIGHT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/right.svg[Embedded,opts=inline] + +include::../parameters/right.asciidoc[] +include::../description/right.asciidoc[] +include::../types/right.asciidoc[] diff --git a/docs/reference/esql/functions/layout/round.asciidoc b/docs/reference/esql/functions/layout/round.asciidoc new file mode 100644 index 0000000000000..815ba1f9a7fe7 --- /dev/null +++ b/docs/reference/esql/functions/layout/round.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-round]] +=== `ROUND` + +*Syntax* + +[.text-center] +image::esql/functions/signature/round.svg[Embedded,opts=inline] + +include::../parameters/round.asciidoc[] +include::../description/round.asciidoc[] +include::../types/round.asciidoc[] diff --git a/docs/reference/esql/functions/layout/rtrim.asciidoc b/docs/reference/esql/functions/layout/rtrim.asciidoc new file mode 100644 index 0000000000000..e15b41479d2e2 --- /dev/null +++ b/docs/reference/esql/functions/layout/rtrim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-rtrim]] +=== `RTRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/rtrim.svg[Embedded,opts=inline] + +include::../parameters/rtrim.asciidoc[] +include::../description/rtrim.asciidoc[] +include::../types/rtrim.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sin.asciidoc b/docs/reference/esql/functions/layout/sin.asciidoc new file mode 100644 index 0000000000000..7b45fcf72c38e --- /dev/null +++ b/docs/reference/esql/functions/layout/sin.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-sin]] +=== `SIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sin.svg[Embedded,opts=inline] + +include::../parameters/sin.asciidoc[] +include::../description/sin.asciidoc[] +include::../types/sin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sinh.asciidoc b/docs/reference/esql/functions/layout/sinh.asciidoc new file mode 100644 index 0000000000000..9ce60af6ed968 --- /dev/null +++ b/docs/reference/esql/functions/layout/sinh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-sinh]] +=== `SINH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sinh.svg[Embedded,opts=inline] + +include::../parameters/sinh.asciidoc[] +include::../description/sinh.asciidoc[] +include::../types/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/split.asciidoc b/docs/reference/esql/functions/layout/split.asciidoc new file mode 100644 index 0000000000000..cff612ec5351e --- /dev/null +++ b/docs/reference/esql/functions/layout/split.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-split]] +=== `SPLIT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/split.svg[Embedded,opts=inline] + +include::../parameters/split.asciidoc[] +include::../description/split.asciidoc[] +include::../types/split.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sqrt.asciidoc b/docs/reference/esql/functions/layout/sqrt.asciidoc new file mode 100644 index 0000000000000..a14186425841d --- /dev/null +++ b/docs/reference/esql/functions/layout/sqrt.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-sqrt]] +=== `SQRT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/sqrt.svg[Embedded,opts=inline] + +include::../parameters/sqrt.asciidoc[] +include::../description/sqrt.asciidoc[] +include::../types/sqrt.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc new file mode 100644 index 0000000000000..6ed1ae2b83e91 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-st_x]] +=== `ST_X` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_x.svg[Embedded,opts=inline] + +include::../parameters/st_x.asciidoc[] +include::../description/st_x.asciidoc[] +include::../types/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc new file mode 100644 index 0000000000000..7b4e585611294 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-st_y]] +=== `ST_Y` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_y.svg[Embedded,opts=inline] + +include::../parameters/st_y.asciidoc[] +include::../description/st_y.asciidoc[] +include::../types/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/layout/starts_with.asciidoc b/docs/reference/esql/functions/layout/starts_with.asciidoc new file mode 100644 index 0000000000000..80fccbb1fc0b1 --- /dev/null +++ b/docs/reference/esql/functions/layout/starts_with.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-starts_with]] +=== `STARTS_WITH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/starts_with.svg[Embedded,opts=inline] + +include::../parameters/starts_with.asciidoc[] +include::../description/starts_with.asciidoc[] +include::../types/starts_with.asciidoc[] diff --git a/docs/reference/esql/functions/layout/substring.asciidoc b/docs/reference/esql/functions/layout/substring.asciidoc new file mode 100644 index 0000000000000..343344a1418dd --- /dev/null +++ b/docs/reference/esql/functions/layout/substring.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-substring]] +=== `SUBSTRING` + +*Syntax* + +[.text-center] +image::esql/functions/signature/substring.svg[Embedded,opts=inline] + +include::../parameters/substring.asciidoc[] +include::../description/substring.asciidoc[] +include::../types/substring.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tan.asciidoc b/docs/reference/esql/functions/layout/tan.asciidoc new file mode 100644 index 0000000000000..d304b2bd10e86 --- /dev/null +++ b/docs/reference/esql/functions/layout/tan.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-tan]] +=== `TAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tan.svg[Embedded,opts=inline] + +include::../parameters/tan.asciidoc[] +include::../description/tan.asciidoc[] +include::../types/tan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tanh.asciidoc b/docs/reference/esql/functions/layout/tanh.asciidoc new file mode 100644 index 0000000000000..ab9213e024737 --- /dev/null +++ b/docs/reference/esql/functions/layout/tanh.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-tanh]] +=== `TANH` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tanh.svg[Embedded,opts=inline] + +include::../parameters/tanh.asciidoc[] +include::../description/tanh.asciidoc[] +include::../types/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tau.asciidoc b/docs/reference/esql/functions/layout/tau.asciidoc new file mode 100644 index 0000000000000..542c5db19e4e4 --- /dev/null +++ b/docs/reference/esql/functions/layout/tau.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-tau]] +=== `TAU` + +*Syntax* + +[.text-center] +image::esql/functions/signature/tau.svg[Embedded,opts=inline] + +include::../parameters/tau.asciidoc[] +include::../description/tau.asciidoc[] +include::../types/tau.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_boolean.asciidoc b/docs/reference/esql/functions/layout/to_boolean.asciidoc new file mode 100644 index 0000000000000..cc0dafd0fef23 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_boolean.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_boolean]] +=== `TO_BOOLEAN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_boolean.svg[Embedded,opts=inline] + +include::../parameters/to_boolean.asciidoc[] +include::../description/to_boolean.asciidoc[] +include::../types/to_boolean.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..22494dd785265 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_cartesianpoint]] +=== `TO_CARTESIANPOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_cartesianpoint.svg[Embedded,opts=inline] + +include::../parameters/to_cartesianpoint.asciidoc[] +include::../description/to_cartesianpoint.asciidoc[] +include::../types/to_cartesianpoint.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..04f5a821c4917 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_cartesianshape]] +=== `TO_CARTESIANSHAPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_cartesianshape.svg[Embedded,opts=inline] + +include::../parameters/to_cartesianshape.asciidoc[] +include::../description/to_cartesianshape.asciidoc[] +include::../types/to_cartesianshape.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_datetime.asciidoc b/docs/reference/esql/functions/layout/to_datetime.asciidoc new file mode 100644 index 0000000000000..7815f4b918631 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_datetime.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_datetime]] +=== `TO_DATETIME` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_datetime.svg[Embedded,opts=inline] + +include::../parameters/to_datetime.asciidoc[] +include::../description/to_datetime.asciidoc[] +include::../types/to_datetime.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_degrees.asciidoc b/docs/reference/esql/functions/layout/to_degrees.asciidoc new file mode 100644 index 0000000000000..745ed68cda843 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_degrees.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_degrees]] +=== `TO_DEGREES` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_degrees.svg[Embedded,opts=inline] + +include::../parameters/to_degrees.asciidoc[] +include::../description/to_degrees.asciidoc[] +include::../types/to_degrees.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_double.asciidoc b/docs/reference/esql/functions/layout/to_double.asciidoc new file mode 100644 index 0000000000000..fd8cd7a3f51b2 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_double.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_double]] +=== `TO_DOUBLE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_double.svg[Embedded,opts=inline] + +include::../parameters/to_double.asciidoc[] +include::../description/to_double.asciidoc[] +include::../types/to_double.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_geopoint.asciidoc b/docs/reference/esql/functions/layout/to_geopoint.asciidoc new file mode 100644 index 0000000000000..7dabdf7975617 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_geopoint.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_geopoint]] +=== `TO_GEOPOINT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_geopoint.svg[Embedded,opts=inline] + +include::../parameters/to_geopoint.asciidoc[] +include::../description/to_geopoint.asciidoc[] +include::../types/to_geopoint.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_geoshape.asciidoc b/docs/reference/esql/functions/layout/to_geoshape.asciidoc new file mode 100644 index 0000000000000..550d5fb47d846 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_geoshape.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_geoshape]] +=== `TO_GEOSHAPE` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_geoshape.svg[Embedded,opts=inline] + +include::../parameters/to_geoshape.asciidoc[] +include::../description/to_geoshape.asciidoc[] +include::../types/to_geoshape.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_integer.asciidoc b/docs/reference/esql/functions/layout/to_integer.asciidoc new file mode 100644 index 0000000000000..32ddd388693fc --- /dev/null +++ b/docs/reference/esql/functions/layout/to_integer.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_integer]] +=== `TO_INTEGER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_integer.svg[Embedded,opts=inline] + +include::../parameters/to_integer.asciidoc[] +include::../description/to_integer.asciidoc[] +include::../types/to_integer.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_ip.asciidoc b/docs/reference/esql/functions/layout/to_ip.asciidoc new file mode 100644 index 0000000000000..3e6b54d2aa87a --- /dev/null +++ b/docs/reference/esql/functions/layout/to_ip.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_ip]] +=== `TO_IP` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_ip.svg[Embedded,opts=inline] + +include::../parameters/to_ip.asciidoc[] +include::../description/to_ip.asciidoc[] +include::../types/to_ip.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_long.asciidoc b/docs/reference/esql/functions/layout/to_long.asciidoc new file mode 100644 index 0000000000000..4d618e675d74b --- /dev/null +++ b/docs/reference/esql/functions/layout/to_long.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_long]] +=== `TO_LONG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_long.svg[Embedded,opts=inline] + +include::../parameters/to_long.asciidoc[] +include::../description/to_long.asciidoc[] +include::../types/to_long.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_lower.asciidoc b/docs/reference/esql/functions/layout/to_lower.asciidoc new file mode 100644 index 0000000000000..fc6202591cae5 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_lower.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_lower]] +=== `TO_LOWER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_lower.svg[Embedded,opts=inline] + +include::../parameters/to_lower.asciidoc[] +include::../description/to_lower.asciidoc[] +include::../types/to_lower.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_radians.asciidoc b/docs/reference/esql/functions/layout/to_radians.asciidoc new file mode 100644 index 0000000000000..e2f45dbe166ff --- /dev/null +++ b/docs/reference/esql/functions/layout/to_radians.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_radians]] +=== `TO_RADIANS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_radians.svg[Embedded,opts=inline] + +include::../parameters/to_radians.asciidoc[] +include::../description/to_radians.asciidoc[] +include::../types/to_radians.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_string.asciidoc b/docs/reference/esql/functions/layout/to_string.asciidoc new file mode 100644 index 0000000000000..f9d8a12e9a5cb --- /dev/null +++ b/docs/reference/esql/functions/layout/to_string.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_string]] +=== `TO_STRING` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_string.svg[Embedded,opts=inline] + +include::../parameters/to_string.asciidoc[] +include::../description/to_string.asciidoc[] +include::../types/to_string.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..093c98486ac02 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-to_unsigned_long]] +=== `TO_UNSIGNED_LONG` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_unsigned_long.svg[Embedded,opts=inline] + +include::../parameters/to_unsigned_long.asciidoc[] +include::../description/to_unsigned_long.asciidoc[] +include::../types/to_unsigned_long.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_upper.asciidoc b/docs/reference/esql/functions/layout/to_upper.asciidoc new file mode 100644 index 0000000000000..f23b8a85bce37 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_upper.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_upper]] +=== `TO_UPPER` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_upper.svg[Embedded,opts=inline] + +include::../parameters/to_upper.asciidoc[] +include::../description/to_upper.asciidoc[] +include::../types/to_upper.asciidoc[] diff --git a/docs/reference/esql/functions/layout/to_version.asciidoc b/docs/reference/esql/functions/layout/to_version.asciidoc new file mode 100644 index 0000000000000..919749eb7b0e2 --- /dev/null +++ b/docs/reference/esql/functions/layout/to_version.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-to_version]] +=== `TO_VERSION` + +*Syntax* + +[.text-center] +image::esql/functions/signature/to_version.svg[Embedded,opts=inline] + +include::../parameters/to_version.asciidoc[] +include::../description/to_version.asciidoc[] +include::../types/to_version.asciidoc[] diff --git a/docs/reference/esql/functions/layout/trim.asciidoc b/docs/reference/esql/functions/layout/trim.asciidoc new file mode 100644 index 0000000000000..b93bdf013fd32 --- /dev/null +++ b/docs/reference/esql/functions/layout/trim.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +[discrete] +[[esql-trim]] +=== `TRIM` + +*Syntax* + +[.text-center] +image::esql/functions/signature/trim.svg[Embedded,opts=inline] + +include::../parameters/trim.asciidoc[] +include::../description/trim.asciidoc[] +include::../types/trim.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc index 41f58b0d415c2..2860eb31090c4 100644 --- a/docs/reference/esql/functions/least.asciidoc +++ b/docs/reference/esql/functions/least.asciidoc @@ -24,8 +24,6 @@ NOTE: When run on `keyword` or `text` fields, this returns the first string in alphabetical order. When run on `boolean` columns this will return `false` if any values are `false`. -*Supported types* - include::types/least.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/left.asciidoc b/docs/reference/esql/functions/left.asciidoc deleted file mode 100644 index 5d666656b1ee4..0000000000000 --- a/docs/reference/esql/functions/left.asciidoc +++ /dev/null @@ -1,36 +0,0 @@ -[discrete] -[[esql-left]] -=== `LEFT` - -*Syntax* - -[.text-center] -image::esql/functions/signature/left.svg[Embedded,opts=inline] - -*Parameters* - -`str`:: -The string from which to return a substring. - -`length`:: -The number of characters to return. - -*Description* - -Returns the substring that extracts 'length' chars from 'str' starting -from the left. - -*Supported types* - -include::types/left.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/string.csv-spec[tag=left] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/string.csv-spec[tag=left-result] -|=== diff --git a/docs/reference/esql/functions/log.asciidoc b/docs/reference/esql/functions/log.asciidoc index 79ea72898bc2f..b1470e50f2881 100644 --- a/docs/reference/esql/functions/log.asciidoc +++ b/docs/reference/esql/functions/log.asciidoc @@ -23,8 +23,6 @@ Returns the logarithm of a value to a base. 
The input can be any numeric value, Logs of zero, negative numbers, infinites and base of one return `null` as well as a warning. -*Supported types* - include::types/log.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/log10.asciidoc b/docs/reference/esql/functions/log10.asciidoc index d806da3173818..f0c16f3e4769e 100644 --- a/docs/reference/esql/functions/log10.asciidoc +++ b/docs/reference/esql/functions/log10.asciidoc @@ -17,8 +17,6 @@ value is always a double. Logs of 0, negative numbers, and infinites return `null` as well as a warning. -*Supported types* - include::types/log10.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/ltrim.asciidoc b/docs/reference/esql/functions/ltrim.asciidoc index 4b7b619d06afc..8fb6b6b1b6e25 100644 --- a/docs/reference/esql/functions/ltrim.asciidoc +++ b/docs/reference/esql/functions/ltrim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Removes leading whitespaces from strings. -*Supported types* - include::types/rtrim.asciidoc[] *Example* @@ -29,4 +27,4 @@ include::{esql-specs}/string.csv-spec[tag=ltrim] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/string.csv-spec[tag=ltrim-result] -|=== \ No newline at end of file +|=== diff --git a/docs/reference/esql/functions/mv_avg.asciidoc b/docs/reference/esql/functions/mv_avg.asciidoc index 27fa2542a8b8f..c81574beed376 100644 --- a/docs/reference/esql/functions/mv_avg.asciidoc +++ b/docs/reference/esql/functions/mv_avg.asciidoc @@ -19,8 +19,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the average of all of the values. 
-*Supported types* - include::types/mv_avg.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_concat.asciidoc b/docs/reference/esql/functions/mv_concat.asciidoc index e42cc84d62b15..b5ad13cbe3619 100644 --- a/docs/reference/esql/functions/mv_concat.asciidoc +++ b/docs/reference/esql/functions/mv_concat.asciidoc @@ -20,8 +20,6 @@ Delimiter. Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter. -*Supported types* - include::types/mv_concat.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_count.asciidoc b/docs/reference/esql/functions/mv_count.asciidoc index 0545335556030..ac870cf77605d 100644 --- a/docs/reference/esql/functions/mv_count.asciidoc +++ b/docs/reference/esql/functions/mv_count.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing a count of the number of values. -*Supported types* - include::types/mv_count.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_dedupe.asciidoc b/docs/reference/esql/functions/mv_dedupe.asciidoc index 09b3827c45e45..84def0127f0ac 100644 --- a/docs/reference/esql/functions/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/mv_dedupe.asciidoc @@ -18,8 +18,6 @@ Removes duplicates from a multivalue expression. NOTE: `MV_DEDUPE` may, but won't always, sort the values in the column. -*Supported types* - include::types/mv_dedupe.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_first.asciidoc b/docs/reference/esql/functions/mv_first.asciidoc index 13d21b15f958e..115e8e69f2a3c 100644 --- a/docs/reference/esql/functions/mv_first.asciidoc +++ b/docs/reference/esql/functions/mv_first.asciidoc @@ -24,8 +24,6 @@ rely on that. If you need the minimum value use <> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a performance benefit to `MV_FIRST`. 
-*Supported types* - include::types/mv_first.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_last.asciidoc b/docs/reference/esql/functions/mv_last.asciidoc index ee6a4a8fed8ba..7843009b74249 100644 --- a/docs/reference/esql/functions/mv_last.asciidoc +++ b/docs/reference/esql/functions/mv_last.asciidoc @@ -24,8 +24,6 @@ rely on that. If you need the maximum value use <> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a performance benefit to `MV_LAST`. -*Supported types* - include::types/mv_last.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_max.asciidoc b/docs/reference/esql/functions/mv_max.asciidoc index e13e61e0d123d..c915ce5d2e603 100644 --- a/docs/reference/esql/functions/mv_max.asciidoc +++ b/docs/reference/esql/functions/mv_max.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the maximum value. -*Supported types* - include::types/mv_max.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_median.asciidoc b/docs/reference/esql/functions/mv_median.asciidoc index 05c54342c0f74..44f955e20e1cb 100644 --- a/docs/reference/esql/functions/mv_median.asciidoc +++ b/docs/reference/esql/functions/mv_median.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued column into a single valued column containing the median value. -*Supported types* - include::types/mv_median.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_min.asciidoc b/docs/reference/esql/functions/mv_min.asciidoc index b851f480fd619..1965d3de52781 100644 --- a/docs/reference/esql/functions/mv_min.asciidoc +++ b/docs/reference/esql/functions/mv_min.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued expression into a single valued column containing the minimum value. 
-*Supported types* - include::types/mv_min.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/mv_slice.asciidoc b/docs/reference/esql/functions/mv_slice.asciidoc index f4431b25232a2..65436392fcf4e 100644 --- a/docs/reference/esql/functions/mv_slice.asciidoc +++ b/docs/reference/esql/functions/mv_slice.asciidoc @@ -22,8 +22,6 @@ End position. Optional; if omitted, the position at `start` is returned. The end Returns a subset of the multivalued field using the start and end index values. -*Supported types* - include::types/mv_slice.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_sort.asciidoc b/docs/reference/esql/functions/mv_sort.asciidoc index abe09989fbac5..2df9a8c01ca62 100644 --- a/docs/reference/esql/functions/mv_sort.asciidoc +++ b/docs/reference/esql/functions/mv_sort.asciidoc @@ -19,8 +19,6 @@ Sort order. The valid options are ASC and DESC, the default is ASC. Sorts a multivalue expression in lexicographical order. -*Supported types* - include::types/mv_sort.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_sum.asciidoc b/docs/reference/esql/functions/mv_sum.asciidoc index bc252bc9d3fa0..56f9565097a00 100644 --- a/docs/reference/esql/functions/mv_sum.asciidoc +++ b/docs/reference/esql/functions/mv_sum.asciidoc @@ -17,8 +17,6 @@ Multivalue expression. Converts a multivalued column into a single valued column containing the sum of all of the values. -*Supported types* - include::types/mv_sum.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/mv_zip.asciidoc b/docs/reference/esql/functions/mv_zip.asciidoc index 4e71e2cafb9c4..0fc30fb91a737 100644 --- a/docs/reference/esql/functions/mv_zip.asciidoc +++ b/docs/reference/esql/functions/mv_zip.asciidoc @@ -22,8 +22,6 @@ Delimiter. Optional; if omitted, `,` is used as a default delimiter. Combines the values from two multivalued fields with a delimiter that joins them together. 
-*Supported types* - include::types/mv_zip.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/parameters/abs.asciidoc b/docs/reference/esql/functions/parameters/abs.asciidoc new file mode 100644 index 0000000000000..5e41a08473a4e --- /dev/null +++ b/docs/reference/esql/functions/parameters/abs.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: + diff --git a/docs/reference/esql/functions/parameters/acos.asciidoc b/docs/reference/esql/functions/parameters/acos.asciidoc new file mode 100644 index 0000000000000..02089d079f6e5 --- /dev/null +++ b/docs/reference/esql/functions/parameters/acos.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +Number between -1 and 1 diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc new file mode 100644 index 0000000000000..02089d079f6e5 --- /dev/null +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +Number between -1 and 1 diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc new file mode 100644 index 0000000000000..c76d5ab2b5d98 --- /dev/null +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +A number diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc new file mode 100644 index 0000000000000..ab5921fe2482a --- /dev/null +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`y`:: +y coordinate + +`x`:: +x coordinate diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc new file mode 100644 index 0000000000000..0f9c6a1b81c99 --- /dev/null +++ b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc @@ -0,0 +1,13 @@ +*Parameters* + +`field`:: + + +`buckets`:: + + +`from`:: + + 
+ +`to`:: + diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc new file mode 100644 index 0000000000000..8c24eef308860 --- /dev/null +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`condition`:: + + +`rest`:: + diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc b/docs/reference/esql/functions/parameters/ceil.asciidoc new file mode 100644 index 0000000000000..5e41a08473a4e --- /dev/null +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: + diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc new file mode 100644 index 0000000000000..3a3a8ac917984 --- /dev/null +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`expression`:: +Expression to evaluate + +`expressionX`:: +Other expression to evaluate diff --git a/docs/reference/esql/functions/parameters/concat.asciidoc b/docs/reference/esql/functions/parameters/concat.asciidoc new file mode 100644 index 0000000000000..55c75eae0de74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/concat.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: + + +`rest`:: + diff --git a/docs/reference/esql/functions/parameters/cos.asciidoc b/docs/reference/esql/functions/parameters/cos.asciidoc new file mode 100644 index 0000000000000..6c943569ecd04 --- /dev/null +++ b/docs/reference/esql/functions/parameters/cos.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/cosh.asciidoc b/docs/reference/esql/functions/parameters/cosh.asciidoc new file mode 100644 index 0000000000000..65d9408644a82 --- /dev/null +++ b/docs/reference/esql/functions/parameters/cosh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +The number whose hyperbolic cosine is to be returned
diff --git a/docs/reference/esql/functions/parameters/date_diff.asciidoc b/docs/reference/esql/functions/parameters/date_diff.asciidoc new file mode 100644 index 0000000000000..9a9ef6fb34fba --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_diff.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`unit`:: +A valid date unit + +`startTimestamp`:: +A string representing a start timestamp + +`endTimestamp`:: +A string representing an end timestamp diff --git a/docs/reference/esql/functions/parameters/date_extract.asciidoc b/docs/reference/esql/functions/parameters/date_extract.asciidoc new file mode 100644 index 0000000000000..7a2c2c4dcc9af --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_extract.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`date_part`:: +Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era. 
+ +`field`:: +Date expression diff --git a/docs/reference/esql/functions/parameters/date_parse.asciidoc b/docs/reference/esql/functions/parameters/date_parse.asciidoc new file mode 100644 index 0000000000000..30a09e43c5361 --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_parse.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`datePattern`:: +A valid date pattern + +`dateString`:: +A string representing a date diff --git a/docs/reference/esql/functions/parameters/e.asciidoc b/docs/reference/esql/functions/parameters/e.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/e.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/ends_with.asciidoc b/docs/reference/esql/functions/parameters/ends_with.asciidoc new file mode 100644 index 0000000000000..314eec2bf39ea --- /dev/null +++ b/docs/reference/esql/functions/parameters/ends_with.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`suffix`:: + diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc new file mode 100644 index 0000000000000..5e41a08473a4e --- /dev/null +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: + diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc new file mode 100644 index 0000000000000..55c75eae0de74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: + + +`rest`:: + diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc new file mode 100644 index 0000000000000..55c75eae0de74 --- /dev/null +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`first`:: + + +`rest`:: + diff --git 
a/docs/reference/esql/functions/parameters/left.asciidoc b/docs/reference/esql/functions/parameters/left.asciidoc new file mode 100644 index 0000000000000..df95257f4160a --- /dev/null +++ b/docs/reference/esql/functions/parameters/left.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: +The string from which to return a substring. + +`length`:: +The number of characters to return. diff --git a/docs/reference/esql/functions/parameters/length.asciidoc b/docs/reference/esql/functions/parameters/length.asciidoc new file mode 100644 index 0000000000000..4fb63948eceaa --- /dev/null +++ b/docs/reference/esql/functions/parameters/length.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: + diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc new file mode 100644 index 0000000000000..25292ef14d7da --- /dev/null +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`base`:: + + +`value`:: + diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc new file mode 100644 index 0000000000000..5e41a08473a4e --- /dev/null +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: + diff --git a/docs/reference/esql/functions/parameters/ltrim.asciidoc b/docs/reference/esql/functions/parameters/ltrim.asciidoc new file mode 100644 index 0000000000000..4fb63948eceaa --- /dev/null +++ b/docs/reference/esql/functions/parameters/ltrim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: + diff --git a/docs/reference/esql/functions/parameters/mv_avg.asciidoc b/docs/reference/esql/functions/parameters/mv_avg.asciidoc new file mode 100644 index 0000000000000..56df4f5138a27 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_avg.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`field`:: + diff --git a/docs/reference/esql/functions/parameters/mv_concat.asciidoc 
b/docs/reference/esql/functions/parameters/mv_concat.asciidoc new file mode 100644 index 0000000000000..c4b846f766d8c --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_concat.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`v`:: +values to join + +`delim`:: +delimiter diff --git a/docs/reference/esql/functions/parameters/mv_count.asciidoc b/docs/reference/esql/functions/parameters/mv_count.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_count.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_first.asciidoc b/docs/reference/esql/functions/parameters/mv_first.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_first.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_last.asciidoc b/docs/reference/esql/functions/parameters/mv_last.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_last.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_max.asciidoc b/docs/reference/esql/functions/parameters/mv_max.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_max.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_median.asciidoc b/docs/reference/esql/functions/parameters/mv_median.asciidoc new file mode 100644 index 
0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_median.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_min.asciidoc b/docs/reference/esql/functions/parameters/mv_min.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_min.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_slice.asciidoc b/docs/reference/esql/functions/parameters/mv_slice.asciidoc new file mode 100644 index 0000000000000..83faf7c299fb2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_slice.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`v`:: +A multivalued field + +`start`:: +start index + +`end`:: +end index (included) diff --git a/docs/reference/esql/functions/parameters/mv_sort.asciidoc b/docs/reference/esql/functions/parameters/mv_sort.asciidoc new file mode 100644 index 0000000000000..aee8353cfd416 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_sort.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`field`:: +A multivalued field + +`order`:: +sort order diff --git a/docs/reference/esql/functions/parameters/mv_sum.asciidoc b/docs/reference/esql/functions/parameters/mv_sum.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_sum.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/mv_zip.asciidoc b/docs/reference/esql/functions/parameters/mv_zip.asciidoc new file mode 100644 index 0000000000000..592f119795cd9 --- /dev/null +++ b/docs/reference/esql/functions/parameters/mv_zip.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`mvLeft`:: +A multivalued field + +`mvRight`:: +A multivalued field + +`delim`:: +delimiter diff --git a/docs/reference/esql/functions/parameters/pi.asciidoc 
b/docs/reference/esql/functions/parameters/pi.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/pi.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/pow.asciidoc b/docs/reference/esql/functions/parameters/pow.asciidoc new file mode 100644 index 0000000000000..77b3dc186dac7 --- /dev/null +++ b/docs/reference/esql/functions/parameters/pow.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`base`:: + + +`exponent`:: + diff --git a/docs/reference/esql/functions/parameters/replace.asciidoc b/docs/reference/esql/functions/parameters/replace.asciidoc new file mode 100644 index 0000000000000..6180447b7067e --- /dev/null +++ b/docs/reference/esql/functions/parameters/replace.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`str`:: + + +`regex`:: + + +`newStr`:: + diff --git a/docs/reference/esql/functions/parameters/right.asciidoc b/docs/reference/esql/functions/parameters/right.asciidoc new file mode 100644 index 0000000000000..34d92a327e958 --- /dev/null +++ b/docs/reference/esql/functions/parameters/right.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`length`:: + diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc new file mode 100644 index 0000000000000..a6fe2eb73eba6 --- /dev/null +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`value`:: +The numeric value to round + +`decimals`:: +The number of decimal places to round to. Defaults to 0. 
diff --git a/docs/reference/esql/functions/parameters/rtrim.asciidoc b/docs/reference/esql/functions/parameters/rtrim.asciidoc new file mode 100644 index 0000000000000..4fb63948eceaa --- /dev/null +++ b/docs/reference/esql/functions/parameters/rtrim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: + diff --git a/docs/reference/esql/functions/parameters/sin.asciidoc b/docs/reference/esql/functions/parameters/sin.asciidoc new file mode 100644 index 0000000000000..6c943569ecd04 --- /dev/null +++ b/docs/reference/esql/functions/parameters/sin.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/sinh.asciidoc b/docs/reference/esql/functions/parameters/sinh.asciidoc new file mode 100644 index 0000000000000..b94854e0bcab6 --- /dev/null +++ b/docs/reference/esql/functions/parameters/sinh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +The number to return the hyperbolic sine of diff --git a/docs/reference/esql/functions/parameters/split.asciidoc b/docs/reference/esql/functions/parameters/split.asciidoc new file mode 100644 index 0000000000000..a6b9e26af34b1 --- /dev/null +++ b/docs/reference/esql/functions/parameters/split.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`delim`:: + diff --git a/docs/reference/esql/functions/parameters/sqrt.asciidoc b/docs/reference/esql/functions/parameters/sqrt.asciidoc new file mode 100644 index 0000000000000..5e41a08473a4e --- /dev/null +++ b/docs/reference/esql/functions/parameters/sqrt.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: + diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc new file mode 100644 index 0000000000000..d3d26fc981caf --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`point`:: + diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc new 
file mode 100644 index 0000000000000..d3d26fc981caf --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`point`:: + diff --git a/docs/reference/esql/functions/parameters/starts_with.asciidoc b/docs/reference/esql/functions/parameters/starts_with.asciidoc new file mode 100644 index 0000000000000..75558cad04106 --- /dev/null +++ b/docs/reference/esql/functions/parameters/starts_with.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`str`:: + + +`prefix`:: + diff --git a/docs/reference/esql/functions/parameters/substring.asciidoc b/docs/reference/esql/functions/parameters/substring.asciidoc new file mode 100644 index 0000000000000..1a689b56cff71 --- /dev/null +++ b/docs/reference/esql/functions/parameters/substring.asciidoc @@ -0,0 +1,10 @@ +*Parameters* + +`str`:: + + +`start`:: + + +`length`:: + diff --git a/docs/reference/esql/functions/parameters/tan.asciidoc b/docs/reference/esql/functions/parameters/tan.asciidoc new file mode 100644 index 0000000000000..6c943569ecd04 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tan.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +An angle, in radians diff --git a/docs/reference/esql/functions/parameters/tanh.asciidoc b/docs/reference/esql/functions/parameters/tanh.asciidoc new file mode 100644 index 0000000000000..7f78c35fd4cd2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tanh.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`n`:: +The number to return the hyperbolic tangent of diff --git a/docs/reference/esql/functions/parameters/tau.asciidoc b/docs/reference/esql/functions/parameters/tau.asciidoc new file mode 100644 index 0000000000000..ddb88c98f7503 --- /dev/null +++ b/docs/reference/esql/functions/parameters/tau.asciidoc @@ -0,0 +1 @@ +*Parameters* diff --git a/docs/reference/esql/functions/parameters/to_boolean.asciidoc b/docs/reference/esql/functions/parameters/to_boolean.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null 
+++ b/docs/reference/esql/functions/parameters/to_boolean.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_datetime.asciidoc b/docs/reference/esql/functions/parameters/to_datetime.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_datetime.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_degrees.asciidoc b/docs/reference/esql/functions/parameters/to_degrees.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_degrees.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_double.asciidoc b/docs/reference/esql/functions/parameters/to_double.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_double.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ 
b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_integer.asciidoc b/docs/reference/esql/functions/parameters/to_integer.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_integer.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_ip.asciidoc b/docs/reference/esql/functions/parameters/to_ip.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_ip.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_long.asciidoc b/docs/reference/esql/functions/parameters/to_long.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_long.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_lower.asciidoc b/docs/reference/esql/functions/parameters/to_lower.asciidoc new file mode 100644 index 0000000000000..4f2e56949be24 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_lower.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: +The input string diff --git a/docs/reference/esql/functions/parameters/to_radians.asciidoc b/docs/reference/esql/functions/parameters/to_radians.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_radians.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git 
a/docs/reference/esql/functions/parameters/to_string.asciidoc b/docs/reference/esql/functions/parameters/to_string.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_string.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/to_upper.asciidoc b/docs/reference/esql/functions/parameters/to_upper.asciidoc new file mode 100644 index 0000000000000..4f2e56949be24 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_upper.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: +The input string diff --git a/docs/reference/esql/functions/parameters/to_version.asciidoc b/docs/reference/esql/functions/parameters/to_version.asciidoc new file mode 100644 index 0000000000000..915b46e872870 --- /dev/null +++ b/docs/reference/esql/functions/parameters/to_version.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`v`:: + diff --git a/docs/reference/esql/functions/parameters/trim.asciidoc b/docs/reference/esql/functions/parameters/trim.asciidoc new file mode 100644 index 0000000000000..4fb63948eceaa --- /dev/null +++ b/docs/reference/esql/functions/parameters/trim.asciidoc @@ -0,0 +1,4 @@ +*Parameters* + +`str`:: + diff --git a/docs/reference/esql/functions/pow.asciidoc b/docs/reference/esql/functions/pow.asciidoc index 8c31bd21e8a46..6618b728d7da9 100644 --- a/docs/reference/esql/functions/pow.asciidoc +++ b/docs/reference/esql/functions/pow.asciidoc @@ -21,8 +21,6 @@ Returns the value of `base` raised to the power of `exponent`. Both arguments must be numeric. The output is always a double. 
Note that it is still possible to overflow a double result here; in that case, null will be returned. -*Supported types* - include::types/pow.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/replace.asciidoc b/docs/reference/esql/functions/replace.asciidoc index 05856829eb193..f56567c5150c8 100644 --- a/docs/reference/esql/functions/replace.asciidoc +++ b/docs/reference/esql/functions/replace.asciidoc @@ -25,8 +25,6 @@ The function substitutes in the string `str` any match of the regular expression If any of the arguments is `null`, the result is `null`. -*Supported types* - include::types/replace.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/right.asciidoc b/docs/reference/esql/functions/right.asciidoc index 1b291e53729ee..e9e4c2ebf3806 100644 --- a/docs/reference/esql/functions/right.asciidoc +++ b/docs/reference/esql/functions/right.asciidoc @@ -20,8 +20,6 @@ The number of characters to return. Return the substring that extracts 'length' chars from 'str' starting from the right. -*Supported types* - include::types/right.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/round.asciidoc b/docs/reference/esql/functions/round.asciidoc index 7f1285e85f664..e792db6c1ed69 100644 --- a/docs/reference/esql/functions/round.asciidoc +++ b/docs/reference/esql/functions/round.asciidoc @@ -20,8 +20,6 @@ Rounds a number to the closest number with the specified number of digits. Defaults to 0 digits if no number of digits is provided. If the specified number of digits is negative, rounds to the number of digits left of the decimal point. -*Supported types* - include::types/round.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/rtrim.asciidoc b/docs/reference/esql/functions/rtrim.asciidoc index 588b7b9fc5433..aead0cf88b898 100644 --- a/docs/reference/esql/functions/rtrim.asciidoc +++ b/docs/reference/esql/functions/rtrim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. 
Removes trailing whitespaces from strings. -*Supported types* - include::types/rtrim.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/sin.asciidoc b/docs/reference/esql/functions/sin.asciidoc index e6a8e0cf9331f..6034a695c6071 100644 --- a/docs/reference/esql/functions/sin.asciidoc +++ b/docs/reference/esql/functions/sin.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Sine_and_cosine[Sine] trigonometric function. Input expected in radians. -*Supported types* - include::types/sin.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc index 683ae6962c2fd..0931b9a2b88e1 100644 --- a/docs/reference/esql/functions/sinh.asciidoc +++ b/docs/reference/esql/functions/sinh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Hyperbolic_functions[Sine] hyperbolic function. -*Supported types* - include::types/sinh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/split.asciidoc b/docs/reference/esql/functions/split.asciidoc index 0a4ce584d01da..972085ad36cc6 100644 --- a/docs/reference/esql/functions/split.asciidoc +++ b/docs/reference/esql/functions/split.asciidoc @@ -17,8 +17,6 @@ Delimiter. Only single byte delimiters are currently supported. Splits a single valued string into multiple strings. -*Supported types* - include::types/split.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/sqrt.asciidoc b/docs/reference/esql/functions/sqrt.asciidoc index faf504a6b0af4..e43d01e6cf814 100644 --- a/docs/reference/esql/functions/sqrt.asciidoc +++ b/docs/reference/esql/functions/sqrt.asciidoc @@ -20,8 +20,6 @@ return value is always a double. Square roots of negative numbers are NaN. Square roots of infinites are infinite. 
-*Supported types* - include::types/sqrt.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc index 0f40a66417f9f..692373f054d99 100644 --- a/docs/reference/esql/functions/st_x.asciidoc +++ b/docs/reference/esql/functions/st_x.asciidoc @@ -17,8 +17,6 @@ Expression of type `geo_point` or `cartesian_point`. If `null`, the function ret Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. -*Supported types* - include::types/st_x.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc index e876852228d83..dba9b3d450006 100644 --- a/docs/reference/esql/functions/st_y.asciidoc +++ b/docs/reference/esql/functions/st_y.asciidoc @@ -17,8 +17,6 @@ Expression of type `geo_point` or `cartesian_point`. If `null`, the function ret Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. -*Supported types* - include::types/st_y.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/starts_with.asciidoc b/docs/reference/esql/functions/starts_with.asciidoc index 4d45e89882400..6fbd6ca1f18e6 100644 --- a/docs/reference/esql/functions/starts_with.asciidoc +++ b/docs/reference/esql/functions/starts_with.asciidoc @@ -20,8 +20,6 @@ String expression. If `null`, the function returns `null`. Returns a boolean that indicates whether a keyword string starts with another string. 
-*Supported types* - include::types/starts_with.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index e9fe04ce15761..b568ae1061bb5 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -23,7 +23,7 @@ // end::string_list[] include::concat.asciidoc[] -include::left.asciidoc[] +include::layout/left.asciidoc[] include::length.asciidoc[] include::ltrim.asciidoc[] include::replace.asciidoc[] diff --git a/docs/reference/esql/functions/substring.asciidoc b/docs/reference/esql/functions/substring.asciidoc index 73df7a19aa6b7..82d1c361aa749 100644 --- a/docs/reference/esql/functions/substring.asciidoc +++ b/docs/reference/esql/functions/substring.asciidoc @@ -24,8 +24,6 @@ positions after `start` are returned. Returns a substring of a string, specified by a start position and an optional length. -*Supported types* - include::types/substring.asciidoc[] *Examples* diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc index cc06421616fc1..3b1c446806733 100644 --- a/docs/reference/esql/functions/tan.asciidoc +++ b/docs/reference/esql/functions/tan.asciidoc @@ -17,8 +17,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Sine_and_cosine[Tangent] trigonometric function. Input expected in radians. -*Supported types* - include::types/tan.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc index a21354d23ba50..9b47c68c19cf1 100644 --- a/docs/reference/esql/functions/tanh.asciidoc +++ b/docs/reference/esql/functions/tanh.asciidoc @@ -16,8 +16,6 @@ Numeric expression. If `null`, the function returns `null`. {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function. 
-*Supported types* - include::types/tanh.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/to_cartesianpoint.asciidoc index 223556d2c0e96..bb534d67f2754 100644 --- a/docs/reference/esql/functions/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/to_cartesianpoint.asciidoc @@ -21,8 +21,6 @@ Converts an input value to a `point` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. -*Supported types* - include::types/to_cartesianpoint.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_cartesianshape.asciidoc b/docs/reference/esql/functions/to_cartesianshape.asciidoc index 287d437b3906c..a7ad5351498a2 100644 --- a/docs/reference/esql/functions/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/to_cartesianshape.asciidoc @@ -22,8 +22,6 @@ Converts an input value to a `cartesian_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -*Supported types* - include::types/to_cartesianshape.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_geopoint.asciidoc b/docs/reference/esql/functions/to_geopoint.asciidoc index d4d7d397d8f7b..29514a24161d7 100644 --- a/docs/reference/esql/functions/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/to_geopoint.asciidoc @@ -19,8 +19,6 @@ The input type must be a string or a `geo_point`. Converts an input value to a `geo_point` value. 
-*Supported types* - include::types/to_geopoint.asciidoc[] A string will only be successfully converted if it respects the diff --git a/docs/reference/esql/functions/to_geoshape.asciidoc b/docs/reference/esql/functions/to_geoshape.asciidoc index 8a6ec978dc7bf..2964e3c02fdf0 100644 --- a/docs/reference/esql/functions/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/to_geoshape.asciidoc @@ -22,8 +22,6 @@ Converts an input value to a `geo_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -*Supported types* - include::types/to_geoshape.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_lower.asciidoc b/docs/reference/esql/functions/to_lower.asciidoc index 5b98d82c9a94f..165b59528b43b 100644 --- a/docs/reference/esql/functions/to_lower.asciidoc +++ b/docs/reference/esql/functions/to_lower.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Returns a new string representing the input string converted to lower case. -*Supported types* - include::types/to_lower.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_string.asciidoc b/docs/reference/esql/functions/to_string.asciidoc index e771915977d97..be0ba3583c5da 100644 --- a/docs/reference/esql/functions/to_string.asciidoc +++ b/docs/reference/esql/functions/to_string.asciidoc @@ -18,8 +18,6 @@ Input value. The input can be a single- or multi-valued column or an expression. Converts an input value into a string. -*Supported types* - include::types/to_string.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_upper.asciidoc b/docs/reference/esql/functions/to_upper.asciidoc index cea63bcbb4bb0..282c6aa65e691 100644 --- a/docs/reference/esql/functions/to_upper.asciidoc +++ b/docs/reference/esql/functions/to_upper.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. 
Returns a new string representing the input string converted to upper case. -*Supported types* - include::types/to_upper.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/to_version.asciidoc b/docs/reference/esql/functions/to_version.asciidoc index 6a1583889c87f..878f57f604933 100644 --- a/docs/reference/esql/functions/to_version.asciidoc +++ b/docs/reference/esql/functions/to_version.asciidoc @@ -20,8 +20,6 @@ Input value. The input can be a single- or multi-valued column or an expression. Converts an input string to a version value. -*Supported types* - include::types/to_version.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/trim.asciidoc b/docs/reference/esql/functions/trim.asciidoc index 0b246b7526cd2..341c09d114e88 100644 --- a/docs/reference/esql/functions/trim.asciidoc +++ b/docs/reference/esql/functions/trim.asciidoc @@ -16,8 +16,6 @@ String expression. If `null`, the function returns `null`. Removes leading and trailing whitespaces from strings. -*Supported types* - include::types/trim.asciidoc[] *Example* diff --git a/docs/reference/esql/functions/types/abs.asciidoc b/docs/reference/esql/functions/types/abs.asciidoc index 54341360fed3f..ff3073a11986b 100644 --- a/docs/reference/esql/functions/types/abs.asciidoc +++ b/docs/reference/esql/functions/types/abs.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/acos.asciidoc b/docs/reference/esql/functions/types/acos.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/acos.asciidoc +++ b/docs/reference/esql/functions/types/acos.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc index 3665c112d802d..ff9cb0cdd467b 100644 --- a/docs/reference/esql/functions/types/add.asciidoc +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/asin.asciidoc b/docs/reference/esql/functions/types/asin.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/asin.asciidoc +++ b/docs/reference/esql/functions/types/asin.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/atan.asciidoc b/docs/reference/esql/functions/types/atan.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/atan.asciidoc +++ b/docs/reference/esql/functions/types/atan.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/atan2.asciidoc b/docs/reference/esql/functions/types/atan2.asciidoc index 74fffe9056a16..9684923c65edc 100644 --- a/docs/reference/esql/functions/types/atan2.asciidoc +++ b/docs/reference/esql/functions/types/atan2.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== y | x | result diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index e0ede29e40df1..9fd29f7d4c718 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== field | buckets | from | to | result diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 3bf3d8ad3d713..4190128d49b93 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== condition | rest | result diff --git a/docs/reference/esql/functions/types/ceil.asciidoc b/docs/reference/esql/functions/types/ceil.asciidoc index 54341360fed3f..ff3073a11986b 100644 --- a/docs/reference/esql/functions/types/ceil.asciidoc +++ b/docs/reference/esql/functions/types/ceil.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 2daf6126d6fb0..7d538636d6aec 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== expression | expressionX | result diff --git a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index 1f14abf9c498f..d3d559cf9036e 100644 --- a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest | result diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc index b4e5c6ad5e0b5..68a884fe895b1 100644 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== unit | startTimestamp | endTimestamp | result diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index edd244548fb18..28bb85bca8312 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== date_part | field | result diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index f4922b9bf9c61..6402513dcbe8d 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== datePattern | dateString | result diff --git a/docs/reference/esql/functions/types/div.asciidoc b/docs/reference/esql/functions/types/div.asciidoc index eee2d68e4653f..c7bab205dc96f 100644 --- a/docs/reference/esql/functions/types/div.asciidoc +++ b/docs/reference/esql/functions/types/div.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/e.asciidoc b/docs/reference/esql/functions/types/e.asciidoc index 5854465d5fb49..38679594f7733 100644 --- a/docs/reference/esql/functions/types/e.asciidoc +++ b/docs/reference/esql/functions/types/e.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/ends_with.asciidoc b/docs/reference/esql/functions/types/ends_with.asciidoc index 88489185b41f7..ce79dbeeb7afe 100644 --- a/docs/reference/esql/functions/types/ends_with.asciidoc +++ b/docs/reference/esql/functions/types/ends_with.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | suffix | result diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/floor.asciidoc b/docs/reference/esql/functions/types/floor.asciidoc index 54341360fed3f..ff3073a11986b 100644 --- a/docs/reference/esql/functions/types/floor.asciidoc +++ b/docs/reference/esql/functions/types/floor.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 0e4ebb2d45a31..03df3780236f2 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest | result diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 0e4ebb2d45a31..03df3780236f2 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== first | rest | result diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index 6899a408969f7..78a95bb801378 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | length | result diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index de84fe63c794a..fac7f22999714 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/log.asciidoc b/docs/reference/esql/functions/types/log.asciidoc index d72ea848c349f..a04ceebada910 100644 --- a/docs/reference/esql/functions/types/log.asciidoc +++ b/docs/reference/esql/functions/types/log.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== base | value | result diff --git a/docs/reference/esql/functions/types/log10.asciidoc b/docs/reference/esql/functions/types/log10.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/log10.asciidoc +++ b/docs/reference/esql/functions/types/log10.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/ltrim.asciidoc b/docs/reference/esql/functions/types/ltrim.asciidoc index 26f4e7633d8ae..8174a8b93bcd5 100644 --- a/docs/reference/esql/functions/types/ltrim.asciidoc +++ b/docs/reference/esql/functions/types/ltrim.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/mod.asciidoc b/docs/reference/esql/functions/types/mod.asciidoc index eee2d68e4653f..c7bab205dc96f 100644 --- a/docs/reference/esql/functions/types/mod.asciidoc +++ b/docs/reference/esql/functions/types/mod.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc index 2f5100b1d1494..8bf1f721d316f 100644 --- a/docs/reference/esql/functions/types/mul.asciidoc +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index 0bba9b341c301..f23c3b15363b5 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== field | result diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index e3ea8b0830f47..fa4e7af29059e 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | delim | result diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index a2e7119bab05d..f917938233cf4 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index dc1175ccdd951..a66c7e21aaab3 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 620c7cf13b771..82450a489a895 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 620c7cf13b771..82450a489a895 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index 1a9a1bee08388..555230a1c7252 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index 4bb9cf6c7a1cb..eb0c9996f0bd4 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 1a9a1bee08388..555230a1c7252 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 1891fed3631e9..a832424f25560 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | start | end | result diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index 01416cdd71ae6..60056c5369bd1 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== field | order | result diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index 4bb9cf6c7a1cb..eb0c9996f0bd4 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/mv_zip.asciidoc b/docs/reference/esql/functions/types/mv_zip.asciidoc index 6ee6c29c77264..04495b9c6ee17 100644 --- a/docs/reference/esql/functions/types/mv_zip.asciidoc +++ b/docs/reference/esql/functions/types/mv_zip.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== mvLeft | mvRight | delim | result diff --git a/docs/reference/esql/functions/types/neg.asciidoc b/docs/reference/esql/functions/types/neg.asciidoc index 1b841483fb22e..7d378b0be53ce 100644 --- a/docs/reference/esql/functions/types/neg.asciidoc +++ b/docs/reference/esql/functions/types/neg.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index 27fb19b6d38a2..38c2418bef536 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/pi.asciidoc b/docs/reference/esql/functions/types/pi.asciidoc index 5854465d5fb49..38679594f7733 100644 --- a/docs/reference/esql/functions/types/pi.asciidoc +++ b/docs/reference/esql/functions/types/pi.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/pow.asciidoc b/docs/reference/esql/functions/types/pow.asciidoc index 0e22c123ebf53..fb2f9309539c5 100644 --- a/docs/reference/esql/functions/types/pow.asciidoc +++ b/docs/reference/esql/functions/types/pow.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== base | exponent | result diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 8c2be37bd63a0..3a9d25d69fa11 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | regex | newStr | result diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index 6899a408969f7..78a95bb801378 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | length | result diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 33e89c91f0bfe..96cfcad5adb1d 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== value | decimals | result diff --git a/docs/reference/esql/functions/types/rtrim.asciidoc b/docs/reference/esql/functions/types/rtrim.asciidoc index 26f4e7633d8ae..8174a8b93bcd5 100644 --- a/docs/reference/esql/functions/types/rtrim.asciidoc +++ b/docs/reference/esql/functions/types/rtrim.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index 4b5e6856c8fe2..affb344eecbcb 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | delim | result diff --git a/docs/reference/esql/functions/types/sqrt.asciidoc b/docs/reference/esql/functions/types/sqrt.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/sqrt.asciidoc +++ b/docs/reference/esql/functions/types/sqrt.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/st_x.asciidoc b/docs/reference/esql/functions/types/st_x.asciidoc index 94ed4b296f1d4..982ddf17ceaad 100644 --- a/docs/reference/esql/functions/types/st_x.asciidoc +++ b/docs/reference/esql/functions/types/st_x.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== point | result diff --git a/docs/reference/esql/functions/types/st_y.asciidoc b/docs/reference/esql/functions/types/st_y.asciidoc index 94ed4b296f1d4..982ddf17ceaad 100644 --- a/docs/reference/esql/functions/types/st_y.asciidoc +++ b/docs/reference/esql/functions/types/st_y.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== point | result diff --git a/docs/reference/esql/functions/types/starts_with.asciidoc b/docs/reference/esql/functions/types/starts_with.asciidoc index 863ddef3c0361..46d975723a43f 100644 --- a/docs/reference/esql/functions/types/starts_with.asciidoc +++ b/docs/reference/esql/functions/types/starts_with.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | prefix | result diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index 826c4f6274652..f2600f6201e90 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== lhs | rhs | result diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index f12a40c9253fb..0729e28f98ecc 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | start | length | result diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index 1df8dd6526f18..f34dea349ad12 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== n | result diff --git a/docs/reference/esql/functions/types/tau.asciidoc b/docs/reference/esql/functions/types/tau.asciidoc index 5854465d5fb49..38679594f7733 100644 --- a/docs/reference/esql/functions/types/tau.asciidoc +++ b/docs/reference/esql/functions/types/tau.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== result diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc index 7f543963eb090..6138578b26d9e 100644 --- a/docs/reference/esql/functions/types/to_boolean.asciidoc +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc index 081d879c4b713..3ae44dd04a67f 100644 --- a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc index 258a31169782d..a0fe12cf20875 100644 --- a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index bbd755f81f4da..ca89c8dd47d62 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc index 7cb7ca46022c2..210a63718baff 100644 --- a/docs/reference/esql/functions/types/to_degrees.asciidoc +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc index 38e8482b77544..5702d5a8c88e3 100644 --- a/docs/reference/esql/functions/types/to_double.asciidoc +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_geopoint.asciidoc b/docs/reference/esql/functions/types/to_geopoint.asciidoc index c464aec9e983c..579a93affebab 100644 --- a/docs/reference/esql/functions/types/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/types/to_geopoint.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_geoshape.asciidoc b/docs/reference/esql/functions/types/to_geoshape.asciidoc index 5fc8611ee2f92..faf922c8723e0 100644 --- a/docs/reference/esql/functions/types/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/types/to_geoshape.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc index bcea15b9ec80b..04cf58a2df364 100644 --- a/docs/reference/esql/functions/types/to_integer.asciidoc +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index 6d7f9338a9aeb..3e7412ce9b64a 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index 307f573f1db2d..e32f5d80ae92f 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_lower.asciidoc b/docs/reference/esql/functions/types/to_lower.asciidoc index 26f4e7633d8ae..8174a8b93bcd5 100644 --- a/docs/reference/esql/functions/types/to_lower.asciidoc +++ b/docs/reference/esql/functions/types/to_lower.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc index 7cb7ca46022c2..210a63718baff 100644 --- a/docs/reference/esql/functions/types/to_radians.asciidoc +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 773e396f41373..3e4253694e91c 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc index 76d9cf44f4dd2..b35ad29c0f193 100644 --- a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/to_upper.asciidoc b/docs/reference/esql/functions/types/to_upper.asciidoc index 26f4e7633d8ae..8174a8b93bcd5 100644 --- a/docs/reference/esql/functions/types/to_upper.asciidoc +++ b/docs/reference/esql/functions/types/to_upper.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/docs/reference/esql/functions/types/to_version.asciidoc b/docs/reference/esql/functions/types/to_version.asciidoc index ebb83f03a6fe6..66bbb421e8636 100644 --- a/docs/reference/esql/functions/types/to_version.asciidoc +++ b/docs/reference/esql/functions/types/to_version.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== v | result diff --git a/docs/reference/esql/functions/types/trim.asciidoc b/docs/reference/esql/functions/types/trim.asciidoc index 26f4e7633d8ae..8174a8b93bcd5 100644 --- a/docs/reference/esql/functions/types/trim.asciidoc +++ b/docs/reference/esql/functions/types/trim.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== str | result diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index f448cd184d9b2..a08cc92ef238f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -13,7 +13,7 @@ avg |"double avg(field:double|integer|long)" case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." 
| [false, false] | true | false -coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null." | [false, false] | true | false +coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | [false, false] | true | false concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false @@ -29,7 +29,7 @@ ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|te floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." 
| false | false | false greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | [false, false] | true | false least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false -left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false +left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["The string from which to return a substring.", "The number of characters to return."] |keyword | "Returns the substring that extracts 'length' chars from 'str' starting from the left." | [false, false] | false | false length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." 
| [true, false] | false | false log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index e19048a40dda9..6c3a1a7267da2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -104,7 +104,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import java.util.stream.Collectors; public final class EsqlFunctionRegistry extends FunctionRegistry { @@ -269,10 +268,19 @@ public String fullSignature() { return builder.toString(); } + /** + * The name of every argument. + */ public List argNames() { - return args.stream().map(ArgSignature::name).collect(Collectors.toList()); + return args.stream().map(ArgSignature::name).toList(); } + /** + * The description of every argument. + */ + public List argDescriptions() { + return args.stream().map(ArgSignature::description).toList(); + } } public static FunctionDescription description(FunctionDefinition def) { @@ -281,7 +289,7 @@ public static FunctionDescription description(FunctionDefinition def) { return new FunctionDescription(def.name(), List.of(), null, null, false, false); } Constructor constructor = constructors[0]; - FunctionInfo functionInfo = constructor.getAnnotation(FunctionInfo.class); + FunctionInfo functionInfo = functionInfo(def); String functionDescription = functionInfo == null ? "" : functionInfo.description().replace('\n', ' '); String[] returnType = functionInfo == null ? new String[] { "?" 
} : functionInfo.returnType(); var params = constructor.getParameters(); // no multiple c'tors supported @@ -304,4 +312,12 @@ public static FunctionDescription description(FunctionDefinition def) { return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); } + public static FunctionInfo functionInfo(FunctionDefinition def) { + var constructors = def.clazz().getConstructors(); + if (constructors.length == 0) { + return null; + } + Constructor constructor = constructors[0]; + return constructor.getAnnotation(FunctionInfo.class); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java new file mode 100644 index 0000000000000..0cee9d2c53cde --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/Example.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * An example of using a function that is rendered in the docs. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.CONSTRUCTOR) +public @interface Example { + /** + * The test file that contains the example. + */ + String file(); + + /** + * The tag that fences this example. 
+ */ + String tag(); +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index cd2e710498e5e..ac0d3bea422b0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -23,4 +23,9 @@ String description() default ""; boolean isAggregation() default false; + + /** + * Examples of using this function that are rendered in the docs. + */ + Example[] examples() default {}; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index d4166f8dea5a2..d6c299b460f37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -43,7 +44,8 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo( returnType = { "boolean", "text", "integer", "keyword", "long" }, - description = "Returns the first of its arguments that is not null." + description = "Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`.", + examples = { @Example(file = "null", tag = "coalesce") } ) public Coalesce( Source source, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 229bb665a6772..97783bb6323d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -43,12 +44,13 @@ public class Left extends EsqlScalarFunction { @FunctionInfo( returnType = "keyword", - description = "Return the substring that extracts length chars from the string starting from the left." 
+ description = "Returns the substring that extracts 'length' chars from 'str' starting from the left.", + examples = { @Example(file = "string", tag = "left") } ) public Left( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, - @Param(name = "length", type = { "integer" }) Expression length + @Param(name = "str", type = { "keyword", "text" }, description = "The string from which to return a substring.") Expression str, + @Param(name = "length", type = { "integer" }, description = "The number of characters to return.") Expression length ) { super(source, Arrays.asList(str, length)); this.source = source; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1eaf9e7fb49ab..d58159bfd40e9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1060,7 +1060,7 @@ private static String buildSignatureSvg(String name) throws IOException { *

    * After each test method we add the signature it operated on via * {@link #trackSignature}. Once the test class is done we render - * all the unique signatures to a temp file with {@link #renderTypesTable}. + * all the unique signatures to a temp file with {@link #renderTypes}. * We use a temp file because that's all we're allowed to write to. * Gradle will move the files into the docs after this is done. *

    @@ -1084,28 +1084,33 @@ public void trackSignature() { } @AfterClass - public static void renderTypesTable() throws IOException { + public static void renderDocs() throws IOException { if (System.getProperty("generateDocs") == null) { return; } String name = functionName(); if (binaryOperator(name) != null) { - renderTypesTable(List.of("lhs", "rhs")); + renderTypes(List.of("lhs", "rhs")); return; } if (unaryOperator(name) != null) { - renderTypesTable(List.of("v")); + renderTypes(List.of("v")); return; } FunctionDefinition definition = definition(name); if (definition != null) { - renderTypesTable(EsqlFunctionRegistry.description(definition).argNames()); + EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); + renderTypes(description.argNames()); + renderParametersList(description.argNames(), description.argDescriptions()); + renderDescription(description.description()); + boolean hasExamples = renderExamples(EsqlFunctionRegistry.functionInfo(definition)); + renderFullLayout(name, hasExamples); return; } LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); } - private static void renderTypesTable(List argNames) throws IOException { + private static void renderTypes(List argNames) throws IOException { StringBuilder header = new StringBuilder(); for (String arg : argNames) { header.append(arg).append(" | "); @@ -1127,6 +1132,10 @@ private static void renderTypesTable(List argNames) throws IOException { Collections.sort(table); String rendered = """ + // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ + *Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== """ + header + "\n" + table.stream().collect(Collectors.joining("\n")) + "\n|===\n"; @@ -1134,6 +1143,82 @@ private static void renderTypesTable(List argNames) throws IOException { writeToTempDir("types", rendered, "asciidoc"); } + private static void renderParametersList(List argNames, List argDescriptions) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append("*Parameters*\n"); + for (int a = 0; a < argNames.size(); a++) { + builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); + } + String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing parameters for [{}]:\n{}", functionName(), rendered); + writeToTempDir("parameters", rendered, "asciidoc"); + } + + private static void renderDescription(String description) throws IOException { + String rendered = """ + // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + + *Description* + + """ + description + "\n"; + LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); + writeToTempDir("description", rendered, "asciidoc"); + } + + private static boolean renderExamples(FunctionInfo info) throws IOException { + if (info == null || info.examples().length == 0) { + return false; + } + StringBuilder builder = new StringBuilder(); + builder.append(""" + // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + + *Example$S$* + + """.replace("$S$", info.examples().length == 1 ? 
"" : "s")); + for (Example example : info.examples()) { + builder.append(""" + [source.merge.styled,esql] + ---- + include::{esql-specs}/$FILE$.csv-spec[tag=$TAG$] + ---- + [%header.monospaced.styled,format=dsv,separator=|] + |=== + include::{esql-specs}/$FILE$.csv-spec[tag=$TAG$-result] + |=== + """.replace("$FILE$", example.file()).replace("$TAG$", example.tag())); + } + builder.append('\n'); + String rendered = builder.toString(); + LogManager.getLogger(getTestClass()).info("Writing examples for [{}]:\n{}", functionName(), rendered); + writeToTempDir("examples", rendered, "asciidoc"); + return true; + } + + private static void renderFullLayout(String name, boolean hasExamples) throws IOException { + String rendered = """ + // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + + [discrete] + [[esql-$NAME$]] + === `$UPPER_NAME$` + + *Syntax* + + [.text-center] + image::esql/functions/signature/$NAME$.svg[Embedded,opts=inline] + + include::../parameters/$NAME$.asciidoc[] + include::../description/$NAME$.asciidoc[] + include::../types/$NAME$.asciidoc[] + """.replace("$NAME$", name).replace("$UPPER_NAME$", name.toUpperCase(Locale.ROOT)); + if (hasExamples) { + rendered += "include::../examples/" + name + ".asciidoc[]\n"; + } + LogManager.getLogger(getTestClass()).info("Writing layout for [{}]:\n{}", functionName(), rendered); + writeToTempDir("layout", rendered, "asciidoc"); + } + private static String functionName() { Class testClass = getTestClass(); if (testClass.isAnnotationPresent(FunctionName.class)) { From 444866aec923217ee98c261dc20c1e7cfd0c6d15 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 12:51:57 -0700 Subject: [PATCH 039/214] Set explicit directory and file permissions on native libraries (#106505) The distributions already have correct permissions set on native libraries copied to them. However, the build itself to extract the native libs relies on the upstream file permissions. 
This commit sets explicit permissions on the copy task which extracts native libraries. --- libs/native/libraries/build.gradle | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 23d2b6e2219d9..621eb2d625af4 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -57,6 +57,12 @@ def extractLibs = tasks.register('extractLibs', Copy) { filesMatching("win32*/*") { it.path = it.path.replace("win32", "windows") } + filePermissions { + unix("644") + } + dirPermissions { + unix("755") + } } artifacts { From 55c3357c81d783f307b2aac15339ba127012795b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 13:40:52 -0700 Subject: [PATCH 040/214] Move common mrjar forbidden apis configuration to plugin (#106385) Since mrjars may use preview apis, forbidden apis must know about any preview apis from the jdk. However, we do not run forbidden apis with the preview enabled flag, nor in a separate jvm, so it does not know about these classes. Thus we ignore missing classes on source sets added by the mrjar plugin. This commit configures all sourcesets added by mrjar plugin to ignore forbidden apis missing classes. 
--- .../elasticsearch/gradle/internal/MrjarPlugin.java | 11 +++++++++++ libs/native/build.gradle | 8 -------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index a9b332c3cfd3c..adf84b63d8689 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -40,6 +41,7 @@ import javax.inject.Inject; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; import static org.objectweb.asm.Opcodes.V_PREVIEW; public class MrjarPlugin implements Plugin { @@ -121,6 +123,15 @@ private void addMrjarSourceset( compileTask.doLast(t -> { stripPreviewFromFiles(compileTask.getDestinationDirectory().getAsFile().get().toPath()); }); }); + + // Since we configure MRJAR sourcesets to allow preview apis, class signatures for those + // apis are not known by forbidden apis, so we must ignore all missing classes. We could, in theory, + // run forbidden apis in a separate jvm matching the sourceset jvm, but it's not worth + // the complexity (according to forbidden apis author!) 
+ String forbiddenApisTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); + project.getTasks().withType(CheckForbiddenApisTask.class).named(forbiddenApisTaskName).configure(forbiddenApisTask -> { + forbiddenApisTask.setIgnoreMissingClasses(true); + }); } private static void stripPreviewFromFiles(Path compileDir) { diff --git a/libs/native/build.gradle b/libs/native/build.gradle index 150ca9c8e80f3..bfcba01b4783a 100644 --- a/libs/native/build.gradle +++ b/libs/native/build.gradle @@ -28,11 +28,3 @@ dependencies { tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } - -tasks.named('forbiddenApisMain21').configure { - ignoreMissingClasses = true -} - -tasks.named('forbiddenApisMain22').configure { - ignoreMissingClasses = true -} From 83bc94af68417d6baf014a4d457973623e33276f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 19 Mar 2024 13:41:14 -0700 Subject: [PATCH 041/214] Mark node seen task as successful (#106502) The task for updating cluster state with nodes seen by shutdown was previously switched to use batched tasks. However, the task is never marked as complete, which leads to the tasks piling up. This commit marks the task as complete and re-enables a test that appears to succeed now. 
closes #76689 --- .../org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java | 3 +-- .../java/org/elasticsearch/xpack/shutdown/NodeSeenService.java | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index e8d06e6f8cbe2..e2bcf10325fd6 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -70,7 +70,6 @@ public void testShardStatusStaysCompleteAfterNodeLeaves() throws Exception { * Similar to the previous test, but ensures that the status stays at `COMPLETE` when the node is offline when the shutdown is * registered. This may happen if {@link NodeSeenService} isn't working as expected. 
*/ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76689") public void testShardStatusStaysCompleteAfterNodeLeavesIfRegisteredWhileNodeOffline() throws Exception { assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToRestartName = internalCluster().startNode(); @@ -92,7 +91,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { NodesInfoResponse nodes = clusterAdmin().prepareNodesInfo().clear().get(); assertThat(nodes.getNodes().size(), equalTo(1)); - assertNodeShutdownStatus(nodeToRestartId, COMPLETE); + assertBusy(() -> { assertNodeShutdownStatus(nodeToRestartId, COMPLETE); }); } /** diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java index 554b617774db3..83e72d4146640 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/NodeSeenService.java @@ -101,6 +101,7 @@ public ClusterState execute(BatchExecutionContext batc var nodesNotPreviouslySeen = new HashSet<>(); for (final var taskContext : batchExecutionContext.taskContexts()) { nodesNotPreviouslySeen.addAll(taskContext.getTask().nodesNotPreviouslySeen()); + taskContext.success(() -> {}); } var nodes = initialState.nodes(); From ea1672b00f78adb77d1635fbec70db464590aa35 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 19 Mar 2024 16:52:53 -0400 Subject: [PATCH 042/214] ESQL: Fix CSV tests (#106506) When we use `ROW` in ESQL we pick a random data set by just iterating the `Map`. It's random. Yay! And some of them don't work in this place. This just picks one that we know works. 
Closes #106501 --- .../test/java/org/elasticsearch/xpack/esql/CsvTests.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 0948387c011a8..662ae1a208ed0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -9,7 +9,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -143,7 +142,6 @@ * To log the results logResults() should return "true". */ // @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug") -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106501") public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); @@ -321,7 +319,11 @@ private static CsvTestsDataLoader.TestsDataset testsDataset(LogicalPlan parsed) var preAnalysis = new PreAnalyzer().preAnalyze(parsed); var indices = preAnalysis.indices; if (indices.size() == 0) { - return CSV_DATASET_MAP.values().iterator().next(); // default dataset for `row` source command + /* + * If the data set doesn't matter we'll just grab one we know works. + * Employees is fine. 
+ */ + return CSV_DATASET_MAP.get("employees"); } else if (preAnalysis.indices.size() > 1) { throw new IllegalArgumentException("unexpected index resolution to multiple entries [" + preAnalysis.indices.size() + "]"); } From edbff946491099df5fa4d7c21e7314ed54459fbb Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 19 Mar 2024 17:44:53 -0400 Subject: [PATCH 043/214] [ML] Inference API Rate limiter (#106330) * Working tests * Adding more tests * Adding comment * Switching to micros and addressing feedback * Removing nanos and adding test for bug fix --------- Co-authored-by: Elastic Machine --- .../xpack/inference/common/RateLimiter.java | 148 ++++++++++++ .../inference/common/RateLimiterTests.java | 225 ++++++++++++++++++ 2 files changed, 373 insertions(+) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java new file mode 100644 index 0000000000000..ac28aa87f554b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -0,0 +1,148 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.Strings; + +import java.time.Clock; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +/** + * Implements a throttler using the token bucket algorithm. + * + * The general approach is to define the rate limiter with size (accumulated tokens limit) which dictates how many + * unused tokens can be saved up, and a rate at which the tokens are created. Then when a thread should be rate limited + * it can attempt to acquire a certain number of tokens (typically one for each item of work it's going to do). If unused tokens + * are available in the bucket already, those will be used. If the number of available tokens covers the desired amount + * the thread will not sleep. If the bucket does not contain enough tokens, it will calculate how long the thread needs to sleep + * to accumulate the requested amount of tokens. + * + * By setting the accumulated tokens limit to a value greater than zero, it effectively allows bursts of traffic. If the accumulated + * tokens limit is set to zero, it will force the acquiring thread to wait on each call. 
+ */ +public class RateLimiter { + + private double tokensPerMicros; + private double accumulatedTokensLimit; + private double accumulatedTokens; + private Instant nextTokenAvailability; + private final Sleeper sleeper; + private final Clock clock; + + /** + * @param accumulatedTokensLimit the limit for tokens stashed in the bucket + * @param tokensPerTimeUnit the number of tokens to produce per the time unit passed in + * @param unit the time unit frequency for generating tokens + */ + public RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit) { + this(accumulatedTokensLimit, tokensPerTimeUnit, unit, new TimeUnitSleeper(), Clock.systemUTC()); + } + + // default for testing + RateLimiter(double accumulatedTokensLimit, double tokensPerTimeUnit, TimeUnit unit, Sleeper sleeper, Clock clock) { + this.sleeper = Objects.requireNonNull(sleeper); + this.clock = Objects.requireNonNull(clock); + nextTokenAvailability = Instant.MIN; + setRate(accumulatedTokensLimit, tokensPerTimeUnit, unit); + } + + public final synchronized void setRate(double newAccumulatedTokensLimit, double newTokensPerTimeUnit, TimeUnit newUnit) { + Objects.requireNonNull(newUnit); + + if (newAccumulatedTokensLimit < 0) { + throw new IllegalArgumentException("Accumulated tokens limit must be greater than or equal to 0"); + } + + if (Double.isInfinite(newAccumulatedTokensLimit)) { + throw new IllegalArgumentException( + Strings.format("Accumulated tokens limit must be less than or equal to %s", Double.MAX_VALUE) + ); + } + + if (newTokensPerTimeUnit <= 0) { + throw new IllegalArgumentException("Tokens per time unit must be greater than 0"); + } + + if (newTokensPerTimeUnit == Double.POSITIVE_INFINITY) { + throw new IllegalArgumentException(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE)); + } + + accumulatedTokens = Math.min(accumulatedTokens, newAccumulatedTokensLimit); + + accumulatedTokensLimit = newAccumulatedTokensLimit; + + 
var unitsInMicros = newUnit.toMicros(1); + tokensPerMicros = newTokensPerTimeUnit / unitsInMicros; + assert Double.isInfinite(tokensPerMicros) == false : "Tokens per microsecond should not be infinity"; + + accumulateTokens(); + } + + /** + * Causes the thread to wait until the tokens are available + * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here + * @throws InterruptedException _ + */ + public void acquire(int tokens) throws InterruptedException { + if (tokens <= 0) { + throw new IllegalArgumentException("Requested tokens must be positive"); + } + + double microsToWait; + synchronized (this) { + accumulateTokens(); + var accumulatedTokensToUse = Math.min(tokens, accumulatedTokens); + var additionalTokensRequired = tokens - accumulatedTokensToUse; + microsToWait = additionalTokensRequired / tokensPerMicros; + accumulatedTokens -= accumulatedTokensToUse; + nextTokenAvailability = nextTokenAvailability.plus((long) microsToWait, ChronoUnit.MICROS); + } + + sleeper.sleep((long) microsToWait); + } + + private void accumulateTokens() { + var now = Instant.now(clock); + if (now.isAfter(nextTokenAvailability)) { + var elapsedTimeMicros = microsBetweenExact(nextTokenAvailability, now); + var newTokens = tokensPerMicros * elapsedTimeMicros; + accumulatedTokens = Math.min(accumulatedTokensLimit, accumulatedTokens + newTokens); + nextTokenAvailability = now; + } + } + + private static long microsBetweenExact(Instant start, Instant end) { + try { + return ChronoUnit.MICROS.between(start, end); + } catch (ArithmeticException e) { + if (end.isAfter(start)) { + return Long.MAX_VALUE; + } + + return 0; + } + } + + // default for testing + Instant getNextTokenAvailability() { + return nextTokenAvailability; + } + + public interface Sleeper { + void sleep(long microsecondsToSleep) throws InterruptedException; + } + + static final class TimeUnitSleeper implements Sleeper { + public void sleep(long microsecondsToSleep) 
throws InterruptedException { + TimeUnit.MICROSECONDS.sleep(microsecondsToSleep); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java new file mode 100644 index 0000000000000..46931f12aaf4f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/RateLimiterTests.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; + +import java.time.Clock; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RateLimiterTests extends ESTestCase { + public void testThrows_WhenAccumulatedTokensLimit_IsNegative() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(-1, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Accumulated tokens limit must be greater than or equal to 0")); + } + + public void testThrows_WhenAccumulatedTokensLimit_IsInfinity() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(Double.POSITIVE_INFINITY, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat( + exception.getMessage(), + is(Strings.format("Accumulated tokens 
limit must be less than or equal to %s", Double.MAX_VALUE)) + ); + } + + public void testThrows_WhenTokensPerTimeUnit_IsZero() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, 0, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Tokens per time unit must be greater than 0")); + } + + public void testThrows_WhenTokensPerTimeUnit_IsInfinity() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, Double.POSITIVE_INFINITY, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE))); + } + + public void testThrows_WhenTokensPerTimeUnit_IsNegative() { + var exception = expectThrows( + IllegalArgumentException.class, + () -> new RateLimiter(0, -1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()) + ); + assertThat(exception.getMessage(), is("Tokens per time unit must be greater than 0")); + } + + public void testAcquire_Throws_WhenTokens_IsZero() { + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); + var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(0)); + assertThat(exception.getMessage(), is("Requested tokens must be positive")); + } + + public void testAcquire_Throws_WhenTokens_IsNegative() { + var limiter = new RateLimiter(0, 1, TimeUnit.SECONDS, new RateLimiter.TimeUnitSleeper(), Clock.systemUTC()); + var exception = expectThrows(IllegalArgumentException.class, () -> limiter.acquire(-1)); + assertThat(exception.getMessage(), is("Requested tokens must be positive")); + } + + public void testAcquire_First_CallDoesNotSleep() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + 
when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_DoesNotSleep_WhenTokenRateIsHigh() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsHigh() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, Double.MAX_VALUE, TimeUnit.MICROSECONDS, sleeper, clock); + limiter.acquire(Integer.MAX_VALUE); + verify(sleeper, times(1)).sleep(0); + } + + public void testAcquire_AcceptsMaxIntValue_WhenTokenRateIsLow() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + double tokensPerDay = 1; + var limiter = new RateLimiter(0, tokensPerDay, TimeUnit.DAYS, sleeper, clock); + limiter.acquire(Integer.MAX_VALUE); + + double tokensPerMicro = tokensPerDay / TimeUnit.DAYS.toMicros(1); + verify(sleeper, times(1)).sleep((long) ((double) Integer.MAX_VALUE / tokensPerMicro)); + } + + public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, 
sleeper, clock); + limiter.acquire(2); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SleepsForOneMinute_WhenRequestingOneUnavailableToken_NoAccumulated() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SleepsFor10Minute_WhenRequesting10UnavailableToken_NoAccumulated() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(10); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(10)); + } + + public void testAcquire_IncrementsNextTokenAvailabilityInstant_ByOneMinute() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(0, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + assertThat(limiter.getNextTokenAvailability(), is(now.plus(1, ChronoUnit.MINUTES))); + } + + public void testAcquire_SecondCallToAcquire_ShouldWait_WhenAccumulatedTokensAreDepleted() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + 
limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.MINUTES.toMicros(1)); + } + + public void testAcquire_SecondCallToAcquire_ShouldWaitForHalfDuration_WhenElapsedTimeIsHalfRequiredDuration() + throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(1, 1, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(0); + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); + limiter.acquire(1); + verify(sleeper, times(1)).sleep(TimeUnit.SECONDS.toMicros(30)); + } + + public void testAcquire_ShouldAccumulateTokens() throws InterruptedException { + var now = Clock.systemUTC().instant(); + var clock = mock(Clock.class); + when(clock.instant()).thenReturn(now); + + var sleeper = mock(RateLimiter.Sleeper.class); + + var limiter = new RateLimiter(10, 10, TimeUnit.MINUTES, sleeper, clock); + limiter.acquire(5); + verify(sleeper, times(1)).sleep(0); + // it should accumulate 5 tokens + when(clock.instant()).thenReturn(now.plus(Duration.ofSeconds(30))); + limiter.acquire(10); + verify(sleeper, times(2)).sleep(0); + } +} From 8cbece4d60fd04d7bf347fb03131606be48cff8b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 19 Mar 2024 15:05:04 -0700 Subject: [PATCH 044/214] Fix testCancelRequestWhenFailingFetchingPages (#106447) If we proceed without waiting for pages, we might cancel the main request before starting the data-node request. As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is longer than the assertBusy timeout. 
Closes #106443 --- .../xpack/esql/action/EsqlActionTaskIT.java | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index ffbf32b7d10e7..23fa3f862a3ff 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -330,7 +330,6 @@ private void assertCancelled(ActionFuture response) throws Ex * Ensure that when some exchange requests fail, we cancel the ESQL request, and complete all * exchange sinks with the failure, despite having outstanding pages in the buffer. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106443") public void testCancelRequestWhenFailingFetchingPages() throws Exception { String coordinator = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); String dataNode = internalCluster().startDataOnlyNode(); @@ -368,6 +367,9 @@ protected void doRun() throws Exception { request.pragmas(randomPragmas()); PlainActionFuture future = new PlainActionFuture<>(); client.execute(EsqlQueryAction.INSTANCE, request, future); + ExchangeService exchangeService = internalCluster().getInstance(ExchangeService.class, dataNode); + boolean waitedForPages; + final String sessionId; try { List foundTasks = new ArrayList<>(); assertBusy(() -> { @@ -381,12 +383,12 @@ protected void doRun() throws Exception { assertThat(tasks, hasSize(1)); foundTasks.addAll(tasks); }); - String sessionId = foundTasks.get(0).taskId().toString(); - ExchangeService exchangeService = internalCluster().getInstance(ExchangeService.class, dataNode); + sessionId = foundTasks.get(0).taskId().toString(); assertTrue(fetchingStarted.await(1, 
TimeUnit.MINUTES)); ExchangeSinkHandler exchangeSink = exchangeService.getSinkHandler(sessionId); - if (randomBoolean()) { - // do not fail exchange requests when we have some pages + waitedForPages = randomBoolean(); + if (waitedForPages) { + // do not fail exchange requests until we have some pages assertBusy(() -> assertThat(exchangeSink.bufferSize(), greaterThan(0))); } } finally { @@ -394,6 +396,12 @@ protected void doRun() throws Exception { } Exception failure = expectThrows(Exception.class, () -> future.actionGet().close()); assertThat(failure.getMessage(), containsString("failed to fetch pages")); + // If we proceed without waiting for pages, we might cancel the main request before starting the data-node request. + // As a result, the exchange sinks on data-nodes won't be removed until the inactive_timeout elapses, which is + // longer than the assertBusy timeout. + if (waitedForPages == false) { + exchangeService.finishSinkHandler(sessionId, failure); + } } finally { transportService.clearAllRules(); } From d5565b618a9b35bcae13052ef9dbcf0ddf0b8fbb Mon Sep 17 00:00:00 2001 From: Athena Brown Date: Tue, 19 Mar 2024 17:08:41 -0600 Subject: [PATCH 045/214] Fix typo in OIDC docs (#106207) (#106517) Add missing _to_ in sentence (cherry picked from commit 40a9155b3b53925b335530da4e076e39e66ee8f7) Co-authored-by: Aaron Hanusa --- docs/reference/security/authentication/oidc-guide.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/security/authentication/oidc-guide.asciidoc b/docs/reference/security/authentication/oidc-guide.asciidoc index c2112b949c540..bbb37853ca3ac 100644 --- a/docs/reference/security/authentication/oidc-guide.asciidoc +++ b/docs/reference/security/authentication/oidc-guide.asciidoc @@ -22,7 +22,7 @@ The OpenID Connect Provider (OP) is the entity in OpenID Connect that is respons authenticating the user and for granting the necessary tokens with the authentication and user information to be consumed by the 
Relying Parties. -In order for the Elastic Stack to be able use your OpenID Connect Provider for authentication, +In order for the Elastic Stack to be able to use your OpenID Connect Provider for authentication, a trust relationship needs to be established between the OP and the RP. In the OpenID Connect Provider, this means registering the RP as a client. OpenID Connect defines a dynamic client registration protocol but this is usually geared towards real-time client registration and From dbb7847d55f81eabac4f916e88329d4e55e43aff Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 20 Mar 2024 10:05:02 +0200 Subject: [PATCH 046/214] Expose lookup of realm domain config by realm id (#106424) The scope here is to expose a method (Realms#getRealmRef) that can be used to retrieve the realm domain assignments for any realm id. --- .../core/security/authc/Authentication.java | 14 + .../xpack/core/security/authc/Realm.java | 9 +- .../action/user/TransportGetUsersAction.java | 8 +- .../action/user/TransportQueryUserAction.java | 8 +- .../xpack/security/authc/Realms.java | 63 +++- .../user/TransportGetUsersActionTests.java | 48 +-- .../user/TransportQueryUserActionTests.java | 13 +- .../xpack/security/authc/RealmsTests.java | 346 +++++++++++++++--- .../support/DummyUsernamePasswordRealm.java | 7 +- 9 files changed, 386 insertions(+), 130 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index d71690f3dc8e7..6a06be1b63b77 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -28,7 +28,9 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; 
+import org.elasticsearch.xpack.core.security.action.profile.Profile; import org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.RoleDescriptorsBytes; +import org.elasticsearch.xpack.core.security.authc.RealmConfig.RealmIdentifier; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.service.ServiceAccountSettings; @@ -1008,6 +1010,11 @@ public String toString() { return builder.toString(); } + /** + * {@link RealmRef} expresses the grouping of realms, identified with {@link RealmIdentifier}s, under {@link RealmDomain}s. + * A domain groups different realms, such that any username, authenticated by different realms from the same domain, + * is to be associated to a single {@link Profile}. + */ public static class RealmRef implements Writeable, ToXContentObject { private final String nodeName; @@ -1082,6 +1089,13 @@ public String getType() { return domain; } + /** + * The {@code RealmIdentifier} is the fully qualified way to refer to a realm. 
+ */ + public RealmIdentifier getIdentifier() { + return new RealmIdentifier(type, name); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java index a6a3d926db689..63989ee86b3a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Realm.java @@ -14,7 +14,6 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; -import org.elasticsearch.xpack.core.security.authc.RealmConfig.RealmIdentifier; import org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings; import org.elasticsearch.xpack.core.security.user.User; @@ -22,7 +21,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; /** * An authentication mechanism to which the default authentication org.elasticsearch.xpack.security.authc.AuthenticationService @@ -145,8 +143,11 @@ public void usageStats(ActionListener> listener) { listener.onResponse(stats); } - public void initRealmRef(Map realmRefs) { - final RealmRef realmRef = Objects.requireNonNull(realmRefs.get(new RealmIdentifier(type(), name())), "realmRef must not be null"); + /** + * Must be called only once by the realms initialization logic, soon after this {@code Realm} is constructed, + * in order to link in the realm domain details, which may refer to any of the other realms. 
+ */ + public void setRealmRef(RealmRef realmRef) { this.realmRef.set(realmRef); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index 78b5f3afc17cf..5f47cb9223f70 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.Realms; @@ -63,12 +62,7 @@ public TransportGetUsersAction( this.settings = settings; this.usersStore = usersStore; this.reservedRealm = reservedRealm; - this.nativeRealmRef = realms.getRealmRefs() - .values() - .stream() - .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) - .findFirst() - .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + this.nativeRealmRef = realms.getNativeRealmRef(); this.profileService = profileService; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java index c5a8795779f08..ca5b9fc54db47 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Subject; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.profile.ProfileService; @@ -57,12 +56,7 @@ public TransportQueryUserAction( super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager()); this.usersStore = usersStore; this.profileService = profileService; - this.nativeRealmRef = realms.getRealmRefs() - .values() - .stream() - .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) - .findFirst() - .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + this.nativeRealmRef = realms.getNativeRealmRef(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java index 2ca70bee55d4e..2c0436a000968 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Realms.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; @@ -110,7 +111,13 @@ public Realms( // initRealms will add default file and native realm config if they are not explicitly configured final List initialRealms = initRealms(realmConfigs); realmRefs = 
calculateRealmRefs(realmConfigs, realmToDomainConfig); - initialRealms.forEach(realm -> realm.initRealmRef(realmRefs)); + for (Realm realm : initialRealms) { + Authentication.RealmRef realmRef = Objects.requireNonNull( + realmRefs.get(new RealmConfig.RealmIdentifier(realm.type(), realm.name())), + "realmRef can not be null" + ); + realm.setRealmRef(realmRef); + } this.allConfiguredRealms = initialRealms; this.allConfiguredRealms.forEach(r -> r.initialize(this.allConfiguredRealms, licenseState)); @@ -155,6 +162,12 @@ private Map calculateRealm new Authentication.RealmRef(realmIdentifier.getName(), realmIdentifier.getType(), nodeName, realmDomain) ); } + assert realmRefs.values().stream().filter(realmRef -> ReservedRealm.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one reserved realm configured"; + assert realmRefs.values().stream().filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one native realm configured"; + assert realmRefs.values().stream().filter(realmRef -> FileRealmSettings.TYPE.equals(realmRef.getType())).toList().size() == 1 + : "there must be exactly one file realm configured"; return Map.copyOf(realmRefs); } @@ -368,8 +381,52 @@ public Map domainUsageStats() { } } - public Map getRealmRefs() { - return realmRefs; + /** + * Retrieves the {@link Authentication.RealmRef}, which contains the {@link DomainConfig}, if configured, + * for the passed in {@link RealmConfig.RealmIdentifier}. + * If the realm is not currently configured, {@code null} is returned. 
+ */ + public @Nullable Authentication.RealmRef getRealmRef(RealmConfig.RealmIdentifier realmIdentifier) { + // "file", "native", and "reserved" realms may be renamed, but they refer to the same corpus of users + if (FileRealmSettings.TYPE.equals(realmIdentifier.getType())) { + return getFileRealmRef(); + } else if (NativeRealmSettings.TYPE.equals(realmIdentifier.getType())) { + return getNativeRealmRef(); + } else if (ReservedRealm.TYPE.equals(realmIdentifier.getType())) { + return getReservedRealmRef(); + } else { + // but for other realms, it is assumed that a different realm name or realm type signifies a different corpus of users + return realmRefs.get(realmIdentifier); + } + } + + public Authentication.RealmRef getNativeRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); + } + + public Authentication.RealmRef getFileRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> FileRealmSettings.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("file realm realm ref not found")); + } + + public Authentication.RealmRef getReservedRealmRef() { + return realmRefs.values() + .stream() + .filter(realmRef -> ReservedRealm.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("reserved realm realm ref not found")); + } + + // should only be useful for testing + int getRealmRefsCount() { + return realmRefs.size(); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index b6a1523b09784..9f3dccd8b2a48 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.core.security.action.user.GetUsersResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.AnonymousUser; @@ -114,12 +113,7 @@ public void testAnonymousUser() { when(securityIndex.isAvailable(SecurityIndexManager.Availability.SEARCH_SHARDS)).thenReturn(true); AnonymousUser anonymousUser = new AnonymousUser(settings); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, threadPool); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -195,12 +189,7 @@ public void testReservedUsersOnly() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); final 
Collection allReservedUsers = userFuture.actionGet(); @@ -284,12 +273,7 @@ public void testGetAllUsers() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -390,12 +374,7 @@ public void testGetUsersWithProfileUidException() { new AnonymousUser(settings), threadPool ); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -445,12 +424,7 @@ private void testGetStoreOnlyUsers(List storeUsers) { NativeUsersStore usersStore = mock(NativeUsersStore.class); AnonymousUser anonymousUser = new AnonymousUser(settings); ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, threadPool); - reservedRealm.initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME), - new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node") - ) - ); + reservedRealm.setRealmRef(new Authentication.RealmRef(ReservedRealm.NAME, ReservedRealm.TYPE, "node")); TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -596,16 +570,8 @@ private List randomUsersWithInternalUsernames() { private Realms mockRealms() { final Realms realms = mock(Realms.class); - when(realms.getRealmRefs()).thenReturn( - Map.of( - new 
RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, NativeRealmSettings.DEFAULT_NAME), - new Authentication.RealmRef( - NativeRealmSettings.DEFAULT_NAME, - NativeRealmSettings.TYPE, - randomAlphaOfLengthBetween(3, 8), - null - ) - ) + when(realms.getNativeRealmRef()).thenReturn( + new Authentication.RealmRef(NativeRealmSettings.DEFAULT_NAME, NativeRealmSettings.TYPE, randomAlphaOfLengthBetween(3, 8), null) ); return realms; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java index 1c14da149cbd3..3fb3a816baa8b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.Subject; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.user.User; @@ -305,16 +304,8 @@ private ProfileService mockProfileService(boolean throwException, boolean profil private Realms mockRealms() { final Realms realms = mock(Realms.class); - when(realms.getRealmRefs()).thenReturn( - Map.of( - new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, NativeRealmSettings.DEFAULT_NAME), - new Authentication.RealmRef( - NativeRealmSettings.DEFAULT_NAME, - NativeRealmSettings.TYPE, - randomAlphaOfLengthBetween(3, 8), - null - ) - ) + when(realms.getNativeRealmRef()).thenReturn( + new 
Authentication.RealmRef(NativeRealmSettings.DEFAULT_NAME, NativeRealmSettings.TYPE, randomAlphaOfLengthBetween(3, 8), null) ); return realms; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java index c21543500e29b..28b3a1ead9414 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java @@ -76,7 +76,6 @@ import java.util.stream.IntStream; import static org.elasticsearch.test.TestMatchers.throwableWithMessage; -import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -85,7 +84,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -162,6 +160,260 @@ private void setRealmAvailability(Function licenseStateListeners.forEach(LicenseStateListener::licenseStateChanged); } + public void testReservedRealmCannotBePartOfDomain() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String domainName = randomAlphaOfLength(7); + String nativeRealmName = randomFrom("n" + randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." 
+ nativeRealmName + ".order", 4); + String fileRealmName = randomFrom("f" + randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".order", 5); + builder.put( + "xpack.security.authc.domains." + domainName + ".realms", + randomFrom(nativeRealmName + "," + ReservedRealm.NAME, ReservedRealm.NAME + "," + fileRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat(e.getMessage(), containsString("Undefined realms [reserved] cannot be assigned to domains")); + } + + public void testReservedRealmRef() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom("n" + randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".order", 4); + String fileRealmName = randomFrom("f" + randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." 
+ fileRealmName + ".order", 5); + builder.put("xpack.security.authc.reserved_realm.enabled", randomBoolean()); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + Authentication.RealmRef reservedRealmRef = randomFrom( + realms.getReservedRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, randomAlphaOfLength(4))) + ); + assertThat(reservedRealmRef.getName(), is(ReservedRealm.NAME)); + assertThat(reservedRealmRef.getType(), is(ReservedRealm.TYPE)); + assertThat(reservedRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(ReservedRealm.TYPE, ReservedRealm.NAME))); + // no domain + assertThat(reservedRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledNativeRealm() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom(randomAlphaOfLength(8), NativeRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.native." 
+ nativeRealmName + ".enabled", false); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(3)); // reserved, native, file + // there is still a realm ref for the disabled native realm + Authentication.RealmRef nativeRealmRef = randomFrom( + realms.getNativeRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(nativeRealmRef.getName(), is(nativeRealmName)); + assertThat(nativeRealmRef.getType(), is(NativeRealmSettings.TYPE)); + assertThat(nativeRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName))); + // no domain + assertThat(nativeRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledFileRealm() throws Exception { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String fileRealmName = randomFrom(randomAlphaOfLength(8), FileRealmSettings.DEFAULT_NAME); + builder.put("xpack.security.authc.realms.file." 
+ fileRealmName + ".enabled", false); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(3)); // reserved, native, file + // there is still a realm ref for the disabled native realm + Authentication.RealmRef fileRealmRef = randomFrom( + realms.getFileRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(fileRealmRef.getName(), is(fileRealmName)); + assertThat(fileRealmRef.getType(), is(FileRealmSettings.TYPE)); + assertThat(fileRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName))); + // no domain + assertThat(fileRealmRef.getDomain(), nullValue()); + } + + public void testRealmRefForDisabledNativeRealmAssignedToDomain() throws Exception { + String domainName = randomAlphaOfLength(7); + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String nativeRealmName = randomFrom(randomAlphaOfLength(8), NativeRealmSettings.TYPE); + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".enabled", false); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.native." + nativeRealmName + ".order", 5); + } + builder.put("xpack.security.authc.realms.type_0.other.order", 4); + builder.put( + "xpack.security.authc.domains." 
+ domainName + ".realms", + randomFrom(nativeRealmName + ",other", "other," + nativeRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(4)); // reserved, native, file, + other + // the realm ref for the disabled native realm contains the domain + Authentication.RealmRef nativeRealmRef = randomFrom( + realms.getNativeRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(nativeRealmRef.getName(), is(nativeRealmName)); + assertThat(nativeRealmRef.getType(), is(NativeRealmSettings.TYPE)); + assertThat(nativeRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName))); + assertThat(nativeRealmRef.getDomain().name(), is(domainName)); + assertThat( + nativeRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + // the realm ref for the other realm also contains the domain with the native realm ref + Authentication.RealmRef otherRealmRef = realms.getRealmRef(new RealmConfig.RealmIdentifier("type_0", "other")); + assertThat(otherRealmRef.getName(), is("other")); + assertThat(otherRealmRef.getType(), is("type_0")); + assertThat(otherRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier("type_0", "other"))); + assertThat(otherRealmRef.getDomain().name(), is(domainName)); + assertThat( + otherRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, nativeRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + } + + public void 
testRealmRefForDisabledFileRealmAssignedToDomain() throws Exception { + String domainName = randomAlphaOfLength(7); + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + String fileRealmName = randomFrom(randomAlphaOfLength(8), FileRealmSettings.TYPE); + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".enabled", false); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.file." + fileRealmName + ".order", 5); + } + builder.put("xpack.security.authc.realms.type_0.other.order", 4); + builder.put( + "xpack.security.authc.domains." + domainName + ".realms", + randomFrom(fileRealmName + ",other", "other," + fileRealmName) + ); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); + assertThat(realms.getRealmRefsCount(), is(4)); // reserved, native, file, + other + // the realm ref for the disabled file realm contains the domain + Authentication.RealmRef fileRealmRef = randomFrom( + realms.getFileRealmRef(), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName)), + realms.getRealmRef(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, randomAlphaOfLength(4))) + ); + assertThat(fileRealmRef.getName(), is(fileRealmName)); + assertThat(fileRealmRef.getType(), is(FileRealmSettings.TYPE)); + assertThat(fileRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName))); + assertThat(fileRealmRef.getDomain().name(), is(domainName)); + assertThat( + fileRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + // the realm ref for the other realm also contains the domain with the 
native realm ref + Authentication.RealmRef otherRealmRef = realms.getRealmRef(new RealmConfig.RealmIdentifier("type_0", "other")); + assertThat(otherRealmRef.getName(), is("other")); + assertThat(otherRealmRef.getType(), is("type_0")); + assertThat(otherRealmRef.getIdentifier(), is(new RealmConfig.RealmIdentifier("type_0", "other"))); + assertThat(otherRealmRef.getDomain().name(), is(domainName)); + assertThat( + otherRealmRef.getDomain().realms(), + containsInAnyOrder( + new RealmConfig.RealmIdentifier(FileRealmSettings.TYPE, fileRealmName), + new RealmConfig.RealmIdentifier("type_0", "other") + ) + ); + } + + public void testMultipleNativeRealmsFails() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.native.n1.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native.n1.order", 10); + } else { + builder.put("xpack.security.authc.realms.native." + NativeRealmSettings.DEFAULT_NAME + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native." + NativeRealmSettings.DEFAULT_NAME + ".order", 10); + } + builder.put("xpack.security.authc.realms.native.n2.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.native.n2.order", 100); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat( + e.getMessage(), + containsString( + "multiple [native] realms are configured. 
[native] is an internal realm" + + " and therefore there can only be one such realm configured" + ) + ); + } + + public void testMultipleFileRealmsFails() { + Settings.Builder builder = Settings.builder() + .put("path.home", createTempDir()) + .put(Node.NODE_NAME_SETTING.getKey(), randomAlphaOfLengthBetween(3, 8)); + if (randomBoolean()) { + builder.put("xpack.security.authc.realms.file.f1.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file.f1.order", 10); + } else { + builder.put("xpack.security.authc.realms.file." + FileRealmSettings.DEFAULT_NAME + ".enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file." + FileRealmSettings.DEFAULT_NAME + ".order", 10); + } + builder.put("xpack.security.authc.realms.file.f2.enabled", randomBoolean()); + builder.put("xpack.security.authc.realms.file.f2.order", 100); + Settings settings = builder.build(); + Environment env = TestEnvironment.newEnvironment(settings); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new Realms(settings, env, factories, licenseState, threadContext, reservedRealm) + ); + assertThat( + e.getMessage(), + containsString( + "multiple [file] realms are configured. 
[file] is an internal realm" + + " and therefore there can only be one such realm configured" + ) + ); + } + public void testRealmTypeAvailable() { final Set basicRealmTypes = Sets.newHashSet("file", "native", "reserved"); final Set goldRealmTypes = Sets.newHashSet("ldap", "active_directory", "pki"); @@ -243,35 +495,31 @@ public void testWithSettings() throws Exception { assertThat(realms.getUnlicensedRealms(), sameInstance(realms.getUnlicensedRealms())); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); // reserved, file, native and custom realms - assertThat(realmRefs, aMapWithSize(3 + randomRealmTypesCount)); + assertThat(realms.getRealmRefsCount(), is(3 + randomRealmTypesCount)); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("reserved", "reserved"), buildRealmRef("reserved", "reserved", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", nodeName)) ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("file", "default_file"), buildRealmRef("default_file", "file", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", nodeName)) ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("native", "default_native"), buildRealmRef("default_native", "native", nodeName)) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", "native", nodeName)) ); IntStream.range(0, randomRealmTypesCount) .forEach( index -> assertThat( - realmRefs, - hasEntry( - new 
RealmConfig.RealmIdentifier("type_" + index, "realm_" + index), - buildRealmRef("realm_" + index, "type_" + index, nodeName) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index)), + is(buildRealmRef("realm_" + index, "type_" + index, nodeName)) ) ); } @@ -406,45 +654,31 @@ public void testDomainAssignment() throws Exception { }); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); - // reserved, file, native and custom realms - assertThat(realmRefs, aMapWithSize(3 + randomRealmTypesCount)); + assertThat(realms.getRealmRefsCount(), is(3 + randomRealmTypesCount)); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("reserved", "reserved"), - buildRealmRef("reserved", "reserved", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", nodeName, realmsForDomain)) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("file", "default_file"), - buildRealmRef("default_file", "file", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", nodeName, realmsForDomain)) ); assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("native", "default_native"), - buildRealmRef("default_native", "native", nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", "native", nodeName, realmsForDomain)) ); IntStream.range(0, randomRealmTypesCount) .forEach( index -> 
assertThat( - realmRefs, - hasEntry( - new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index), - buildRealmRef("realm_" + index, "type_" + index, nodeName, realmsForDomain) - ) + realms.getRealmRef(new RealmConfig.RealmIdentifier("type_" + index, "realm_" + index)), + is(buildRealmRef("realm_" + index, "type_" + index, nodeName, realmsForDomain)) ) ); } @@ -614,20 +848,25 @@ public void testWithEmptySettings() throws Exception { assertThat(realms.getUnlicensedRealms(), sameInstance(realms.getUnlicensedRealms())); // realmRefs contains all implicitly and explicitly configured realm (disabled or not) - final Map realmRefs = realms.getRealmRefs(); realms.forEach( activeRealm -> assertThat( activeRealm.realmRef(), - equalTo(realmRefs.get(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) + is(realms.getRealmRef(new RealmConfig.RealmIdentifier(activeRealm.type(), activeRealm.name()))) ) ); // reserved, file, native - assertThat(realmRefs.size(), equalTo(3)); - assertThat(realmRefs, hasEntry(new RealmConfig.RealmIdentifier("reserved", "reserved"), buildRealmRef("reserved", "reserved", ""))); - assertThat(realmRefs, hasEntry(new RealmConfig.RealmIdentifier("file", "default_file"), buildRealmRef("default_file", "file", ""))); + assertThat(realms.getRealmRefsCount(), is(3)); + assertThat( + realms.getRealmRef(new RealmConfig.RealmIdentifier("reserved", "reserved")), + is(buildRealmRef("reserved", "reserved", "")) + ); + assertThat( + realms.getRealmRef(new RealmConfig.RealmIdentifier("file", "default_file")), + is(buildRealmRef("default_file", "file", "")) + ); assertThat( - realmRefs, - hasEntry(new RealmConfig.RealmIdentifier("native", "default_native"), buildRealmRef("default_native", "native", "")) + realms.getRealmRef(new RealmConfig.RealmIdentifier("native", "default_native")), + is(buildRealmRef("default_native", "native", "")) ); } @@ -682,8 +921,12 @@ public void testRealmsAreDisabledOnLicenseDowngrade() throws Exception { final 
Realms realms = new Realms(settings, env, factories, licenseState, threadContext, reservedRealm); assertThat(realms.getUnlicensedRealms(), empty()); assertThat(realms.getActiveRealms(), hasSize(9)); // 0..7 configured + reserved - final Map realmRefs = Map.copyOf(realms.getRealmRefs()); - assertThat(realmRefs, aMapWithSize(9)); + Map beforeRealmRefs = realms.getActiveRealms() + .stream() + .map(r -> new RealmConfig.RealmIdentifier(r.type(), r.name())) + .collect(Collectors.toMap(Function.identity(), realms::getRealmRef)); + assertThat(realms.getRealmRefsCount(), is(9)); + assertThat(beforeRealmRefs.size(), is(9)); verify(licenseState).enableUsageTracking(Security.KERBEROS_REALM_FEATURE, "kerberos_realm"); verify(licenseState).enableUsageTracking(Security.LDAP_REALM_FEATURE, "ldap_realm_1"); @@ -720,7 +963,8 @@ public void testRealmsAreDisabledOnLicenseDowngrade() throws Exception { assertThat(unlicensedRealmNames, containsInAnyOrder("kerberos_realm", "custom_realm_1", "custom_realm_2")); assertThat(realms.getActiveRealms(), hasSize(6)); // 9 - 3 // no change to realm refs - assertThat(realms.getRealmRefs(), equalTo(realmRefs)); + assertThat(realms.getRealmRefsCount(), is(beforeRealmRefs.size())); + beforeRealmRefs.forEach((ri, rr) -> assertThat(realms.getRealmRef(ri), is(rr))); verify(licenseState).disableUsageTracking(Security.KERBEROS_REALM_FEATURE, "kerberos_realm"); verify(licenseState).disableUsageTracking(Security.CUSTOM_REALMS_FEATURE, "custom_realm_1"); @@ -1060,7 +1304,7 @@ public void testNativeRealmNotAddedWhenNativeUsersDisabled() throws Exception { ); // We still want a ref to the native realm so that transport actions can reference it (but the realm is disabled) - assertThat(realms.getRealmRefs(), hasKey(realmId)); + assertThat(realms.getRealmRef(realmId), notNullValue()); } public void testNativeRealmNotAvailableWhenNativeUsersDisabled() throws Exception { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java index 19202bb109215..1eb7f672bc99c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DummyUsernamePasswordRealm.java @@ -27,12 +27,7 @@ public final class DummyUsernamePasswordRealm extends UsernamePasswordRealm { public DummyUsernamePasswordRealm(RealmConfig config) { super(config); - initRealmRef( - Map.of( - new RealmConfig.RealmIdentifier(config.type(), config.name()), - new Authentication.RealmRef(config.name(), config.type(), Node.NODE_NAME_SETTING.get(config.settings())) - ) - ); + setRealmRef(new Authentication.RealmRef(config.name(), config.type(), Node.NODE_NAME_SETTING.get(config.settings()))); this.users = new HashMap<>(); } From c30999bebb674574bed2cb53f5d8d45a199c2237 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 20 Mar 2024 19:05:43 +1100 Subject: [PATCH 047/214] [Test] Ranged read should read non-empty content (#106000) Empty read is [short-circuited](https://github.com/elastic/elasticsearch/blob/e8039b9ecb2451752ac5377c44a6a0c662087a9f/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java#L115-L116) without going to the blob store. In order to test s3 blob store, ranged read should read at least one byte. This PR ensures that. 
Resolves: #105958 --- .../repositories/s3/S3BlobContainerRetriesTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 05268d750637c..52cc48ddac589 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -708,8 +708,8 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_retries_forever"), new FlakyReadHandler()); // Ranged read - final int position = between(0, bytes.length - 1); - final int length = between(0, randomBoolean() ? bytes.length : Integer.MAX_VALUE); + final int position = between(0, bytes.length - 2); + final int length = between(1, randomBoolean() ? 
bytes.length : Integer.MAX_VALUE); logger.info("--> position={}, length={}", position, length); try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.INDICES, "read_blob_retries_forever", position, length)) { assertMetricsForOpeningStream(); From 6f607e4f7631083f4f69cc7df242721cb95a9149 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Wed, 20 Mar 2024 09:27:31 +0100 Subject: [PATCH 048/214] Use cluster features for ASYNC ESQL tests (#104466) + Add esql as rest test dependency for ml/native-multi-node-tests to work around the mixed testClusters/TestCluster nodes (so all have the esql plugin installed) --- .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 34 ++++++++++++++++++- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 5 ++- .../xpack/esql/plugin/EsqlFeatures.java | 10 ++++++ .../qa/native-multi-node-tests/build.gradle | 1 + 4 files changed, 48 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 9d22045522d19..c4ac6923b4d02 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -8,11 +8,17 @@ package org.elasticsearch.xpack.esql.qa.mixed; import org.elasticsearch.Version; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; +import org.junit.AfterClass; +import org.junit.Before; import org.junit.ClassRule; +import java.util.HashSet; +import java.util.Set; + import 
static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; @@ -27,6 +33,32 @@ protected String getTestRestCluster() { static final Version bwcVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + private static final Set oldClusterFeatures = new HashSet<>(); + private static boolean oldClusterFeaturesInitialized = false; + + @Before + public void extractOldClusterFeatures() { + if (oldClusterFeaturesInitialized == false) { + oldClusterFeatures.addAll(testFeatureService.getAllSupportedFeatures()); + oldClusterFeaturesInitialized = true; + } + } + + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterFeaturesInitialized; + return oldClusterFeatures.contains(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + + @AfterClass + public static void cleanUp() { + oldClusterFeaturesInitialized = false; + oldClusterFeatures.clear(); + } + public MixedClusterEsqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase, Mode mode) { super(fileName, groupName, testName, lineNumber, testCase, mode); } @@ -42,6 +74,6 @@ protected void shouldSkipTest(String testName) { @Override protected boolean supportsAsync() { - return bwcVersion.onOrAfter(Version.V_8_13_0); + return oldClusterHasFeature(ASYNC_QUERY_FEATURE_ID); } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index a2aa77a1661d9..090941a9bc0be 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -54,6 +54,9 @@ public 
abstract class EsqlSpecTestCase extends ESRestTestCase { + // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here + protected static final String ASYNC_QUERY_FEATURE_ID = "esql.async_query"; + private static final Logger LOGGER = LogManager.getLogger(EsqlSpecTestCase.class); private final String fileName; private final String groupName; @@ -103,7 +106,7 @@ public void setup() throws IOException { } protected boolean supportsAsync() { - return Version.CURRENT.onOrAfter(Version.V_8_13_0); // the Async API was introduced in 8.13.0 + return clusterHasFeature(ASYNC_QUERY_FEATURE_ID); // the Async API was introduced in 8.13.0 } @AfterClass diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 2b7eadb16f444..fb891b0c87a13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -12,6 +12,7 @@ import org.elasticsearch.features.NodeFeature; import java.util.Map; +import java.util.Set; public class EsqlFeatures implements FeatureSpecification { /** @@ -42,6 +43,10 @@ public class EsqlFeatures implements FeatureSpecification { // */ // private static final NodeFeature GEO_SHAPE_SUPPORT = new NodeFeature("esql.geo_shape"); + public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); + + private static final NodeFeature MV_LOAD = new NodeFeature("esql.mv_load"); + @Override public Map getHistoricalFeatures() { return Map.ofEntries( @@ -53,4 +58,9 @@ public Map getHistoricalFeatures() { // Map.entry(GEO_SHAPE_SUPPORT, Version.V_8_13_0) ); } + + @Override + public Set getFeatures() { + return Set.of(ASYNC_QUERY); + } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle 
index 7a96bec42eb7b..b451f2d4630bb 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -15,6 +15,7 @@ dependencies { javaRestTestImplementation project(path: xpackModule('monitoring')) javaRestTestImplementation project(path: xpackModule('transform')) javaRestTestImplementation project(path: xpackModule('rank-rrf')) + javaRestTestImplementation project(path: xpackModule('esql')) } // location for keys and certificates From 32bc182d9d27a68039bdf45ead74bb978193a495 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 20 Mar 2024 08:50:16 +0000 Subject: [PATCH 049/214] ESQL: expose transport action to improve usage (#106463) This commits exposes the query transport action to improve usage. While one can perform all operations prior to this change, it has been suggested that adding the action would improve the symmetry of the API by allowing e.g. client().execute(builder.action(), builder.request()).actionGet(30, SECONDS); --- .../xpack/core/esql/action/EsqlQueryRequestBuilder.java | 7 +++++++ .../test/esql/qa/action/CoreEsqlActionIT.java | 3 +++ 2 files changed, 10 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index c77d2613cd321..a0a2bbc3bed19 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -16,6 +16,8 @@ public abstract class EsqlQueryRequestBuilder extends ActionRequestBuilder { + private final ActionType action; + /** Creates a new ES|QL query request builder. 
*/ public static EsqlQueryRequestBuilder newRequestBuilder( ElasticsearchClient client @@ -26,6 +28,11 @@ public abstract class EsqlQueryRequestBuilder action, Request request) { super(client, action, request); + this.action = action; + } + + public final ActionType action() { + return action; } public abstract EsqlQueryRequestBuilder query(String query); diff --git a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java index 7431c5b8204d7..8728b605134ac 100644 --- a/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java +++ b/x-pack/plugin/esql/qa/action/src/internalClusterTest/java/org/elasticsearch/test/esql/qa/action/CoreEsqlActionIT.java @@ -129,8 +129,11 @@ public void testAccessAfterClose() { protected EsqlQueryResponse run(EsqlQueryRequestBuilder request) { try { + // The variants here ensure API usage patterns if (randomBoolean()) { return request.execute().actionGet(30, SECONDS); + } else if (randomBoolean()) { + return client().execute(request.action(), request.request()).actionGet(30, SECONDS); } else { return ClientHelper.executeWithHeaders( Map.of("Foo", "bar"), From d01adfff60d568d920a3f9d9b6f2b257ad94189b Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Wed, 20 Mar 2024 10:03:04 +0100 Subject: [PATCH 050/214] Add links to text_expansion in ELSER tutorial (#106490) * Add links to text_expansion in ELSER tutorial * Apply suggestions from code review Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- .../semantic-search-elser.asciidoc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc 
b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 0bee9533cd358..42c9ac4fb4357 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -184,10 +184,10 @@ follow the progress. [[text-expansion-query]] ==== Semantic search by using the `text_expansion` query -To perform semantic search, use the `text_expansion` query, and provide the -query text and the ELSER model ID. The example below uses the query text "How to -avoid muscle soreness after running?", the `content_embedding` field contains -the generated ELSER output: +To perform semantic search, use the <>, +and provide the query text and the ELSER model ID. The example below uses the +query text "How to avoid muscle soreness after running?", the `content_embedding` +field contains the generated ELSER output: [source,console] ---- @@ -258,9 +258,9 @@ tokens from source, refer to <> to learn more. [[text-expansion-compound-query]] ==== Combining semantic search with other queries -You can combine `text_expansion` with other queries in a -<>. For example using a filter clause in a -<> or a full text query which may or may not use the same +You can combine <> with other +queries in a <>. For example, use a filter clause +in a <> or a full text query with the same (or different) query text as the `text_expansion` query. This enables you to combine the search results from both queries. 
From 430758dd92561db8a1d0be7ed80cb882a2766bf3 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Wed, 20 Mar 2024 10:28:47 +0100 Subject: [PATCH 051/214] Test tweaks for compatible security tests (#106464) Test tweaks for serverless: * Valid application name in API key tests * Move from `cluster.health` to `info` call in roles test (the call is just used to check that a user with a cluster privilege is indeed able to execute the test) Closes: ES-7987 --- .../rest-api-spec/test/api_key/10_basic.yml | 16 ++++++++-------- .../rest-api-spec/test/roles/10_basic.yml | 7 +++++-- .../rest-api-spec/test/roles/11_idx_arrays.yml | 3 +-- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml index 09f08d59049ec..095f5c043457e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml @@ -21,7 +21,7 @@ setup: ], "applications": [ { - "application": "myapp", + "application": "kibana-.kibana", "privileges": ["*"], "resources": ["*"] } @@ -43,14 +43,14 @@ setup: security.put_privileges: body: > { - "myapp": { + "kibana-.kibana": { "read": { - "application": "myapp", + "application": "kibana-.kibana", "name": "read", "actions": [ "data:read/*" ] }, "write": { - "application": "myapp", + "application": "kibana-.kibana", "name": "write", "actions": [ "data:write/*" ] } @@ -71,7 +71,7 @@ teardown: - do: security.delete_privileges: - application: myapp + application: "kibana-.kibana" name: "read,write" ignore: 404 @@ -301,7 +301,7 @@ teardown: ], "applications": [ { - "application": "myapp", + "application": "kibana-.kibana", "privileges": ["read"], "resources": ["*"] } @@ -344,7 +344,7 @@ teardown: ], "application": [ { - "application" : "myapp", + "application" : "kibana-.kibana", 
"resources" : [ "*", "some-other-res" ], "privileges" : [ "data:read/me", "data:write/me" ] } @@ -369,7 +369,7 @@ teardown: } } } - match: { "application" : { - "myapp" : { + "kibana-.kibana" : { "*" : { "data:read/me" : true, "data:write/me" : false diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml index ff812a6e2cb9c..edc79a8ebfc9e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/10_basic.yml @@ -52,6 +52,8 @@ teardown: - match: { role: { created: true } } - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" security.put_role: name: "backwards_role" body: > @@ -69,10 +71,11 @@ teardown: - do: headers: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - cluster.health: {} - - match: { timed_out: false } + info: {} - do: + headers: + Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" security.get_role: name: "admin_role" - match: { admin_role.cluster.0: "all" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml index 21badf967b7aa..6ee155eb9aa71 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml @@ -72,8 +72,7 @@ teardown: - do: headers: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" - cluster.health: {} - - match: { timed_out: false } + info: {} - do: security.get_role: From 9980eacb41a3f36a1ed100a03c9082d8084dabbc Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 09:59:47 +0000 Subject: [PATCH 052/214] AwaitsFix for #106533 --- .../esql/expression/function/scalar/string/RLikeTests.java | 2 ++ 1 file changed, 2 insertions(+) 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index d367a7f77c981..0e0fdc7cc47d6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; @@ -30,6 +31,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106533") public class RLikeTests extends AbstractFunctionTestCase { public RLikeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); From 5afc59b07ef5db039588f511f6c8c6d0dcef3352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 20 Mar 2024 11:05:52 +0100 Subject: [PATCH 053/214] [DOCS] Creates a semantic_text field type docs page. (#106528) --- docs/reference/mapping/types.asciidoc | 3 +++ docs/reference/mapping/types/semantic-text.asciidoc | 8 ++++++++ 2 files changed, 11 insertions(+) create mode 100644 docs/reference/mapping/types/semantic-text.asciidoc diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index fff736712529a..7ee1face25339 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -75,6 +75,7 @@ markup. 
Used for identifying named entities. <>:: Used for auto-complete suggestions. <>:: `text`-like type for as-you-type completion. +<>:: <>:: A count of tokens in a text. @@ -178,6 +179,8 @@ include::types/rank-features.asciidoc[] include::types/search-as-you-type.asciidoc[] +include::types/semantic-text.asciidoc[] + include::types/shape.asciidoc[] include::types/sparse-vector.asciidoc[] diff --git a/docs/reference/mapping/types/semantic-text.asciidoc b/docs/reference/mapping/types/semantic-text.asciidoc new file mode 100644 index 0000000000000..51d7693db12aa --- /dev/null +++ b/docs/reference/mapping/types/semantic-text.asciidoc @@ -0,0 +1,8 @@ +[role="xpack"] +[[semantic-text]] +=== Semantic text field type +++++ +Semantic text +++++ + +The documentation page for the `semantic_text` field type. \ No newline at end of file From fdd6d89050a6e9627883771186328354c952a4cf Mon Sep 17 00:00:00 2001 From: Navarone Feekery <13634519+navarone-feekery@users.noreply.github.com> Date: Wed, 20 Mar 2024 11:30:18 +0100 Subject: [PATCH 054/214] [Connectors API] Add missing `_api_key_id` docs (#106469) --- .../connector/apis/connector-apis.asciidoc | 2 + .../update-connector-api-key-id-api.asciidoc | 97 +++++++++++++++++++ .../api/connector.update_api_key_id.json | 2 +- 3 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index 2c43395a7fba1..6f03ded093479 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -33,6 +33,7 @@ Use the following APIs to manage connectors: * <> * <> * <> +* <> * <> * <> * <> @@ -77,6 +78,7 @@ include::list-connectors-api.asciidoc[] include::list-connector-sync-jobs-api.asciidoc[] include::set-connector-sync-job-error-api.asciidoc[] 
include::set-connector-sync-job-stats-api.asciidoc[] +include::update-connector-api-key-id-api.asciidoc[] include::update-connector-configuration-api.asciidoc[] include::update-connector-error-api.asciidoc[] include::update-connector-filtering-api.asciidoc[] diff --git a/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc new file mode 100644 index 0000000000000..e15f57a3e2b1e --- /dev/null +++ b/docs/reference/connector/apis/update-connector-api-key-id-api.asciidoc @@ -0,0 +1,97 @@ +[[update-connector-api-key-id-api]] +=== Update connector API key ID API +++++ +Update connector API key id +++++ + +preview::[] + +Updates the `api_key_id` and/or `api_key_secret_id` field(s) of a connector, specifying: + +. The ID of the API key used for authorization +. The ID of the Connector Secret where the API key is stored + +The Connector Secret ID is only required for native connectors. +Connector clients do not use this field. +See the documentation for {enterprise-search-ref}/native-connectors.html#native-connectors-manage-API-keys-programmatically[managing native connector API keys programmatically^] for more details. + +[[update-connector-api-key-id-api-request]] +==== {api-request-title} + +`PUT _connector//_api_key_id` + +[[update-connector-api-key-id-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. +* The `api_key_id` parameter should reference an existing API key. +* The `api_key_secret_id` parameter should reference an existing Connector Secret containing an encoded API key value. 
+ +[[update-connector-api-key-id-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-api-key-id-api-request-body]] +==== {api-request-body-title} + +`api_key_id`:: +(Optional, string) ID of the API key that the connector will use to authorize access to required indices. Each connector can be associated with at most one API key. + +`api_key_secret_id`:: +(Optional, string) ID of the Connector Secret that contains the encoded API key. This should be the same API key as `api_key_id` references. This is only required for native connectors. + +[[update-connector-api-key-id-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `api_key_id` and/or `api_key_secret_id` field(s) successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[update-connector-api-key-id-api-example]] +==== {api-examples-title} + +The following example updates the `api_key_id` and `api_key_secret_id` field(s) for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_api_key_id +{ + "api_key_id": "my-api-key-id", + "api_key_secret_id": "my-connector-secret-id" +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json index 5b58a7b5b59a5..cc989bfec8a8d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_api_key_id.json @@ -1,7 +1,7 @@ { "connector.update_api_key_id": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/connector-apis.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-api-key-id-api.html", "description": "Updates the API key id and/or API key secret id fields in the connector document." }, "stability": "experimental", From 4ef0a8f1068f9701c087c54cdbd23f49742b7e7f Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 20 Mar 2024 10:40:35 +0000 Subject: [PATCH 055/214] Revert "Re-enable the RemoteClusterSecurityBwcRestIT (#106052)" (#106534) This reverts commit 9b0f1cea8ceb603e1990bef99b5f7e895d7f617d. 
--- .../xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index fee5129f8c9b8..7c26b8e386cc5 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.remotecluster; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -40,6 +41,7 @@ /** * BWC test which ensures that users and API keys with defined {@code remote_indices} privileges can be used to query legacy remote clusters */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104858") public class RemoteClusterSecurityBwcRestIT extends AbstractRemoteClusterSecurityTestCase { private static final Version OLD_CLUSTER_VERSION = Version.fromString(System.getProperty("tests.old_cluster_version")); From 504ef7c9df8567e79288f2dbfb12db79ad3bf32c Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 10:55:56 +0000 Subject: [PATCH 056/214] Clarify threading in `SubscribableListener` (#106532) The docs about threading here are a little opaque to consumers, who typically just want to ensure that the listener is completed on a particular threadpool. This commit adds clearer docs about achieving that goal. 
--- .../action/support/SubscribableListener.java | 29 +++++++++++++++++-- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index 65739b01422a5..adebcfb4e12cb 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -159,7 +159,12 @@ private SubscribableListener(Object initialState) { * (or after) the completion of this listener. *

    * If the subscribed listener is not completed immediately then it will be completed on the thread, and in the {@link ThreadContext}, of - * the thread which completes this listener. + * the thread which completes this listener. In other words, if you want to ensure that {@code listener} is completed using a particular + * executor, then you must do both of: + *

      + *
    • Ensure that this {@link SubscribableListener} is always completed using that executor, and
    • + *
    • Invoke {@link #addListener} using that executor.
    • + *
    */ public final void addListener(ActionListener listener) { addListener(listener, EsExecutors.DIRECT_EXECUTOR_SERVICE, null); @@ -179,6 +184,13 @@ public final void addListener(ActionListener listener) { * @param executor If not {@link EsExecutors#DIRECT_EXECUTOR_SERVICE}, and the subscribing listener is not completed immediately, * then it will be completed using the given executor. If the subscribing listener is completed immediately then * this completion happens on the subscribing thread. + *

    + * In other words, if you want to ensure that {@code listener} is completed using a particular executor, then you + * must do both of: + *

      + *
    • Pass the desired executor in as {@code executor}, and
    • + *
    • Invoke {@link #addListener} using that executor.
    • + *
    * @param threadContext If not {@code null}, and the subscribing listener is not completed immediately, then it will be completed in * the given thread context. If {@code null}, and the subscribing listener is not completed immediately, then it * will be completed in the {@link ThreadContext} of the completing thread. If the subscribing listener is @@ -405,7 +417,13 @@ public void complete(ActionListener listener) { *

    * The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is * already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this - * listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. + * listener is incomplete then {@code nextStep} is invoked on the completing thread and in its thread context. In other words, if you + * want to ensure that {@code nextStep} is invoked using a particular executor, then you must do + * both of: + *

      + *
    • Ensure that this {@link SubscribableListener} is always completed using that executor, and
    • + *
    • Invoke {@link #andThen} using that executor.
    • + *
    */ public SubscribableListener andThen(CheckedBiConsumer, T, ? extends Exception> nextStep) { return andThen(EsExecutors.DIRECT_EXECUTOR_SERVICE, null, nextStep); @@ -427,7 +445,12 @@ public SubscribableListener andThen(CheckedBiConsumer, * The threading of the {@code nextStep} callback is the same as for listeners added with {@link #addListener}: if this listener is * already complete then {@code nextStep} is invoked on the thread calling {@link #andThen} and in its thread context, but if this * listener is incomplete then {@code nextStep} is invoked using {@code executor}, in a thread context captured when {@link #andThen} - * was called. + * was called. In other words, if you want to ensure that {@code nextStep} is invoked using a particular executor, then you must do + * both of: + *
      + *
    • Pass the desired executor in as {@code executor}, and
    • + *
    • Invoke {@link #andThen} using that executor.
    • + *
    */ public SubscribableListener andThen( Executor executor, From 0d6b368dc69d276fffe6bfd73ede3e67b9f7782c Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 20 Mar 2024 11:03:21 +0000 Subject: [PATCH 057/214] [ML] Fork trained model stats work to thread pool threads (#99439) The steps that assemble the trained model stats response largely ran on the transport worker thread. Even though none of the steps look massively expensive, it's likely one of them is, because we've observed client lockups (Kibana) while trained model stats is running. This change moves the steps into the ML utility thread pool to avoid any risk of blocking the transport thread. --------- Co-authored-by: David Turner --- .../TransportGetTrainedModelsStatsAction.java | 224 ++++++++++-------- .../InferenceProcessorInfoExtractor.java | 2 + 2 files changed, 122 insertions(+), 104 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 76321608ba4fb..dc04d65103f5f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestParameters; @@ -17,7 +18,8 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; 
-import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; @@ -26,8 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -37,6 +37,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.ml.action.GetDeploymentStatsAction; @@ -53,6 +54,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelSizeStats; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -65,6 +67,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -73,7 +76,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static 
org.elasticsearch.xpack.ml.utils.InferenceProcessorInfoExtractor.pipelineIdsByResource; -public class TransportGetTrainedModelsStatsAction extends HandledTransportAction< +public class TransportGetTrainedModelsStatsAction extends TransportAction< GetTrainedModelsStatsAction.Request, GetTrainedModelsStatsAction.Response> { @@ -82,25 +85,22 @@ public class TransportGetTrainedModelsStatsAction extends HandledTransportAction private final Client client; private final ClusterService clusterService; private final TrainedModelProvider trainedModelProvider; + private final Executor executor; @Inject public TransportGetTrainedModelsStatsAction( TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, + ThreadPool threadPool, TrainedModelProvider trainedModelProvider, Client client ) { - super( - GetTrainedModelsStatsAction.NAME, - transportService, - actionFilters, - GetTrainedModelsStatsAction.Request::new, - EsExecutors.DIRECT_EXECUTOR_SERVICE - ); + super(GetTrainedModelsStatsAction.NAME, actionFilters, transportService.getTaskManager()); this.client = client; this.clusterService = clusterService; this.trainedModelProvider = trainedModelProvider; + this.executor = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); } @Override @@ -108,6 +108,15 @@ protected void doExecute( Task task, GetTrainedModelsStatsAction.Request request, ActionListener listener + ) { + // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can + executor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); + } + + protected void doExecuteForked( + Task task, + GetTrainedModelsStatsAction.Request request, + ActionListener listener ) { final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); final ModelAliasMetadata modelAliasMetadata = ModelAliasMetadata.fromState(clusterService.state()); @@ -116,101 +125,108 @@ protected void doExecute( 
GetTrainedModelsStatsAction.Response.Builder responseBuilder = new GetTrainedModelsStatsAction.Response.Builder(); - ListenableFuture> modelSizeStatsListener = new ListenableFuture<>(); - modelSizeStatsListener.addListener(listener.delegateFailureAndWrap((l, modelSizeStatsByModelId) -> { - responseBuilder.setModelSizeStatsByModelId(modelSizeStatsByModelId); - l.onResponse( - responseBuilder.build(modelToDeployments(responseBuilder.getExpandedModelIdsWithAliases().keySet(), assignmentMetadata)) - ); - })); - - ListenableFuture deploymentStatsListener = new ListenableFuture<>(); - deploymentStatsListener.addListener(listener.delegateFailureAndWrap((delegate, deploymentStats) -> { - // deployment stats for each matching deployment - // not necessarily for all models - responseBuilder.setDeploymentStatsByDeploymentId( - deploymentStats.getStats() - .results() + SubscribableListener + + .>>>newForked(l -> { + // When the request resource is a deployment find the model used in that deployment for the model stats + final var idExpression = addModelsUsedInMatchingDeployments(request.getResourceId(), assignmentMetadata); + + logger.debug("Expanded models/deployment Ids request [{}]", idExpression); + + // the request id may contain deployment ids + // It is not an error if these don't match a model id but + // they need to be included in case the deployment id is also + // a model id. 
Hence, the `matchedDeploymentIds` parameter + trainedModelProvider.expandIds( + idExpression, + request.isAllowNoResources(), + request.getPageParams(), + Collections.emptySet(), + modelAliasMetadata, + parentTaskId, + matchedDeploymentIds, + l + ); + }) + .andThenAccept(tuple -> responseBuilder.setExpandedModelIdsWithAliases(tuple.v2()).setTotalModelCount(tuple.v1())) + + .andThen( + (l, ignored) -> executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportNodesStatsAction.TYPE, + nodeStatsRequest(clusterService.state(), parentTaskId), + l + ) + ) + .>andThen(executor, null, (l, nodesStatsResponse) -> { + // find all pipelines whether using the model id, alias or deployment id. + Set allPossiblePipelineReferences = responseBuilder.getExpandedModelIdsWithAliases() + .entrySet() .stream() - .collect(Collectors.toMap(AssignmentStats::getDeploymentId, Function.identity())) - ); + .flatMap(entry -> Stream.concat(entry.getValue().stream(), Stream.of(entry.getKey()))) + .collect(Collectors.toSet()); + allPossiblePipelineReferences.addAll(matchedDeploymentIds); - int numberOfAllocations = deploymentStats.getStats().results().stream().mapToInt(AssignmentStats::getNumberOfAllocations).sum(); - modelSizeStats( - responseBuilder.getExpandedModelIdsWithAliases(), - request.isAllowNoResources(), - parentTaskId, - modelSizeStatsListener, - numberOfAllocations - ); - })); - - ListenableFuture> inferenceStatsListener = new ListenableFuture<>(); - // inference stats are per model and are only - // persisted for boosted tree models - inferenceStatsListener.addListener(listener.delegateFailureAndWrap((l, inferenceStats) -> { - responseBuilder.setInferenceStatsByModelId( - inferenceStats.stream().collect(Collectors.toMap(InferenceStats::getModelId, Function.identity())) - ); - getDeploymentStats(client, request.getResourceId(), parentTaskId, assignmentMetadata, deploymentStatsListener); - })); - - ListenableFuture nodesStatsListener = new ListenableFuture<>(); - 
nodesStatsListener.addListener(listener.delegateFailureAndWrap((delegate, nodesStatsResponse) -> { - // find all pipelines whether using the model id, - // alias or deployment id. - Set allPossiblePipelineReferences = responseBuilder.getExpandedModelIdsWithAliases() - .entrySet() - .stream() - .flatMap(entry -> Stream.concat(entry.getValue().stream(), Stream.of(entry.getKey()))) - .collect(Collectors.toSet()); - allPossiblePipelineReferences.addAll(matchedDeploymentIds); - - Map> pipelineIdsByResource = pipelineIdsByResource(clusterService.state(), allPossiblePipelineReferences); - Map modelIdIngestStats = inferenceIngestStatsByModelId( - nodesStatsResponse, - modelAliasMetadata, - pipelineIdsByResource - ); - responseBuilder.setIngestStatsByModelId(modelIdIngestStats); - trainedModelProvider.getInferenceStats( - responseBuilder.getExpandedModelIdsWithAliases().keySet().toArray(new String[0]), - parentTaskId, - inferenceStatsListener - ); - })); - - ListenableFuture>>> idsListener = new ListenableFuture<>(); - idsListener.addListener(listener.delegateFailureAndWrap((delegate, tuple) -> { - responseBuilder.setExpandedModelIdsWithAliases(tuple.v2()).setTotalModelCount(tuple.v1()); - executeAsyncWithOrigin( - client, - ML_ORIGIN, - TransportNodesStatsAction.TYPE, - nodeStatsRequest(clusterService.state(), parentTaskId), - nodesStatsListener - ); - })); - - // When the request resource is a deployment find the - // model used in that deployment for the model stats - String idExpression = addModelsUsedInMatchingDeployments(request.getResourceId(), assignmentMetadata); - logger.debug("Expanded models/deployment Ids request [{}]", idExpression); - - // the request id may contain deployment ids - // It is not an error if these don't match a model id but - // they need to be included in case the deployment id is also - // a model id. 
Hence, the `matchedDeploymentIds` parameter - trainedModelProvider.expandIds( - idExpression, - request.isAllowNoResources(), - request.getPageParams(), - Collections.emptySet(), - modelAliasMetadata, - parentTaskId, - matchedDeploymentIds, - idsListener - ); + Map> pipelineIdsByResource = pipelineIdsByResource( + clusterService.state(), + allPossiblePipelineReferences + ); + Map modelIdIngestStats = inferenceIngestStatsByModelId( + nodesStatsResponse, + modelAliasMetadata, + pipelineIdsByResource + ); + responseBuilder.setIngestStatsByModelId(modelIdIngestStats); + trainedModelProvider.getInferenceStats( + responseBuilder.getExpandedModelIdsWithAliases().keySet().toArray(new String[0]), + parentTaskId, + l + ); + }) + .andThenAccept( + // inference stats are per model and are only persisted for boosted tree models + inferenceStats -> responseBuilder.setInferenceStatsByModelId( + inferenceStats.stream().collect(Collectors.toMap(InferenceStats::getModelId, Function.identity())) + ) + ) + + .andThen( + executor, + null, + (l, ignored) -> getDeploymentStats(client, request.getResourceId(), parentTaskId, assignmentMetadata, l) + ) + .andThenApply(deploymentStats -> { + // deployment stats for each matching deployment not necessarily for all models + responseBuilder.setDeploymentStatsByDeploymentId( + deploymentStats.getStats() + .results() + .stream() + .collect(Collectors.toMap(AssignmentStats::getDeploymentId, Function.identity())) + ); + return deploymentStats.getStats().results().stream().mapToInt(AssignmentStats::getNumberOfAllocations).sum(); + }) + + .>andThen( + executor, + null, + (l, numberOfAllocations) -> modelSizeStats( + responseBuilder.getExpandedModelIdsWithAliases(), + request.isAllowNoResources(), + parentTaskId, + l, + numberOfAllocations + ) + ) + .andThenAccept(responseBuilder::setModelSizeStatsByModelId) + + .andThenApply( + ignored -> responseBuilder.build( + modelToDeployments(responseBuilder.getExpandedModelIdsWithAliases().keySet(), 
assignmentMetadata) + ) + ) + + .addListener(listener, executor, null); } static String addModelsUsedInMatchingDeployments(String idExpression, TrainedModelAssignmentMetadata assignmentMetadata) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java index 48b570e927b15..5a2f044d1f7be 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/InferenceProcessorInfoExtractor.java @@ -13,6 +13,7 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.transport.Transports; import java.util.HashMap; import java.util.LinkedHashSet; @@ -75,6 +76,7 @@ public static int countInferenceProcessors(ClusterState state) { */ @SuppressWarnings("unchecked") public static Map> pipelineIdsByResource(ClusterState state, Set ids) { + assert Transports.assertNotTransportThread("non-trivial nested loops over cluster state structures"); Map> pipelineIdsByModelIds = new HashMap<>(); Metadata metadata = state.metadata(); if (metadata == null) { From 2087b65523e595bf45506b748dd8006d430d6595 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 20 Mar 2024 11:15:21 +0000 Subject: [PATCH 058/214] [ML] Create inference_user and inference_admin roles (#106371) Defines new inference_user and inference_admin roles with the related cluster privileges manage_inference and monitor_inference. 
inference_user can list the models and perform inference, inference_admin can do the same plus create and delete models --- .../inference/delete-inference.asciidoc | 4 +- .../inference/get-inference.asciidoc | 3 +- .../inference/post-inference.asciidoc | 5 +- .../inference/put-inference.asciidoc | 3 +- .../security/get-builtin-privileges.asciidoc | 2 + .../authorization/built-in-roles.asciidoc | 8 ++ .../privilege/ClusterPrivilegeResolver.java | 18 +++ .../authz/store/ReservedRolesStore.java | 26 ++++ .../authz/store/ReservedRolesStoreTests.java | 42 ++++++ .../inference/InferencePermissionsIT.java | 129 ++++++++++++++++++ .../test/privileges/11_builtin.yml | 2 +- 11 files changed, 236 insertions(+), 6 deletions(-) create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 850b4ef1b10b0..4b661236aa928 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -17,13 +17,15 @@ own model, use the <>. ==== {api-request-title} `DELETE /_inference/` + `DELETE /_inference//` [discrete] [[delete-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. +* Requires the `manage_inference` <> +(the built-in `inference_admin` role grants this privilege) [discrete] diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 176909bc5458f..705bc4e7a8c61 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -28,7 +28,8 @@ own model, use the <>. [[get-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. 
+* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) [discrete] [[get-inference-api-desc]] diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 4fb6ea5a4fb6d..970cec7f4a452 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -17,6 +17,7 @@ own model, use the <>. ==== {api-request-title} `POST /_inference/` + `POST /_inference//` @@ -24,8 +25,8 @@ own model, use the <>. [[post-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. - +* Requires the `monitor_inference` <> +(the built-in `inference_admin` and `inference_user` roles grant this privilege) [discrete] [[post-inference-api-desc]] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 2c0d4d38548bb..399a0594987d2 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -25,7 +25,8 @@ or if you want to use non-NLP models, use the <>. [[put-inference-api-prereqs]] ==== {api-prereq-title} -* Requires the `manage` <>. +* Requires the `manage_inference` <> +(the built-in `inference_admin` role grants this privilege) [discrete] diff --git a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc index bd2d21317212b..576a30866dbdf 100644 --- a/docs/reference/rest-api/security/get-builtin-privileges.asciidoc +++ b/docs/reference/rest-api/security/get-builtin-privileges.asciidoc @@ -78,6 +78,7 @@ A successful call returns an object with "cluster" and "index" fields. "manage_enrich", "manage_ilm", "manage_index_templates", + "manage_inference", "manage_ingest_pipelines", "manage_logstash_pipelines", "manage_ml", @@ -99,6 +100,7 @@ A successful call returns an object with "cluster" and "index" fields. 
"monitor", "monitor_data_frame_transforms", "monitor_enrich", + "monitor_inference", "monitor_ml", "monitor_rollup", "monitor_snapshot", diff --git a/docs/reference/security/authorization/built-in-roles.asciidoc b/docs/reference/security/authorization/built-in-roles.asciidoc index 14808140873fd..6db08b307f193 100644 --- a/docs/reference/security/authorization/built-in-roles.asciidoc +++ b/docs/reference/security/authorization/built-in-roles.asciidoc @@ -69,6 +69,14 @@ Grants full access to all features in {kib} (including Solutions) and read-only Grants access to manage *all* enrich indices (`.enrich-*`) and *all* operations on ingest pipelines. +[[built-in-roles-inference-admin]] `inference_admin`:: +Provides all of the privileges of the `inference_user` role and the full +use of the {inference} APIs. Grants the `manage_inference` cluster privilege. + +[[built-in-roles-inference-user]] `inference_user`:: +Provides the minimum privileges required to view {inference} configurations +and perform inference. Grants the `monitor_inference` cluster privilege. + +[[built-in-roles-ingest-user]] `ingest_admin` :: +Grants access to manage *all* index templates and *all* ingest pipeline configurations. 
+ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index c514fb07cc32b..501bd307b74ca 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -97,6 +97,10 @@ public class ClusterPrivilegeResolver { GetComponentTemplateAction.NAME, GetComposableIndexTemplateAction.NAME ); + private static final Set MONITOR_INFERENCE_PATTERN = Set.of( + "cluster:monitor/xpack/inference*", + "cluster:monitor/xpack/ml/trained_models/deployment/infer" + ); private static final Set MONITOR_ML_PATTERN = Set.of("cluster:monitor/xpack/ml/*"); private static final Set MONITOR_TEXT_STRUCTURE_PATTERN = Set.of("cluster:monitor/text_structure/*"); private static final Set MONITOR_TRANSFORM_PATTERN = Set.of("cluster:monitor/data_frame/*", "cluster:monitor/transform/*"); @@ -110,6 +114,13 @@ public class ClusterPrivilegeResolver { "indices:admin/index_template/*" ); private static final Predicate ACTION_MATCHER = Automatons.predicate(ALL_CLUSTER_PATTERN); + private static final Set MANAGE_INFERENCE_PATTERN = Set.of( + "cluster:admin/xpack/inference/*", + "cluster:monitor/xpack/inference*", // no trailing slash to match the POST InferenceAction name + "cluster:admin/xpack/ml/trained_models/deployment/start", + "cluster:admin/xpack/ml/trained_models/deployment/stop", + "cluster:monitor/xpack/ml/trained_models/deployment/infer" + ); private static final Set MANAGE_ML_PATTERN = Set.of("cluster:admin/xpack/ml/*", "cluster:monitor/xpack/ml/*"); private static final Set MANAGE_TRANSFORM_PATTERN = Set.of( "cluster:admin/data_frame/*", @@ -182,6 +193,10 @@ public class ClusterPrivilegeResolver { public static 
final NamedClusterPrivilege NONE = new ActionClusterPrivilege("none", Set.of(), Set.of()); public static final NamedClusterPrivilege ALL = new ActionClusterPrivilege("all", ALL_CLUSTER_PATTERN); public static final NamedClusterPrivilege MONITOR = new ActionClusterPrivilege("monitor", MONITOR_PATTERN); + public static final NamedClusterPrivilege MONITOR_INFERENCE = new ActionClusterPrivilege( + "monitor_inference", + MONITOR_INFERENCE_PATTERN + ); public static final NamedClusterPrivilege MONITOR_ML = new ActionClusterPrivilege("monitor_ml", MONITOR_ML_PATTERN); public static final NamedClusterPrivilege MONITOR_TRANSFORM_DEPRECATED = new ActionClusterPrivilege( "monitor_data_frame_transforms", @@ -199,6 +214,7 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege MONITOR_ROLLUP = new ActionClusterPrivilege("monitor_rollup", MONITOR_ROLLUP_PATTERN); public static final NamedClusterPrivilege MONITOR_ENRICH = new ActionClusterPrivilege("monitor_enrich", MONITOR_ENRICH_PATTERN); public static final NamedClusterPrivilege MANAGE = new ActionClusterPrivilege("manage", ALL_CLUSTER_PATTERN, ALL_SECURITY_PATTERN); + public static final NamedClusterPrivilege MANAGE_INFERENCE = new ActionClusterPrivilege("manage_inference", MANAGE_INFERENCE_PATTERN); public static final NamedClusterPrivilege MANAGE_ML = new ActionClusterPrivilege("manage_ml", MANAGE_ML_PATTERN); public static final NamedClusterPrivilege MANAGE_TRANSFORM_DEPRECATED = new ActionClusterPrivilege( "manage_data_frame_transforms", @@ -348,6 +364,7 @@ public class ClusterPrivilegeResolver { NONE, ALL, MONITOR, + MONITOR_INFERENCE, MONITOR_ML, MONITOR_TEXT_STRUCTURE, MONITOR_TRANSFORM_DEPRECATED, @@ -356,6 +373,7 @@ public class ClusterPrivilegeResolver { MONITOR_ROLLUP, MONITOR_ENRICH, MANAGE, + MANAGE_INFERENCE, MANAGE_ML, MANAGE_TRANSFORM_DEPRECATED, MANAGE_TRANSFORM, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 2bfcf9a12366e..dc5b8bfcce262 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -373,6 +373,32 @@ private static Map initializeReservedRoles() { null ) ), + entry( + "inference_admin", + new RoleDescriptor( + "inference_admin", + new String[] { "manage_inference" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null + ) + ), + entry( + "inference_user", + new RoleDescriptor( + "inference_user", + new String[] { "monitor_inference" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null + ) + ), entry( "machine_learning_user", new RoleDescriptor( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 5e190f72c596c..4ff250c3a68b3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -266,6 +266,8 @@ public void testIsReserved() { assertThat(ReservedRolesStore.isReserved("transport_client"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("inference_admin"), is(true)); + assertThat(ReservedRolesStore.isReserved("inference_user"), is(true)); assertThat(ReservedRolesStore.isReserved("ingest_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("monitoring_user"), is(true)); 
assertThat(ReservedRolesStore.isReserved("reporting_user"), is(true)); @@ -3877,6 +3879,46 @@ public void testEnrichUserRole() { assertOnlyReadAllowed(role, ".enrich-foo"); } + public void testInferenceAdminRole() { + final TransportRequest request = mock(TransportRequest.class); + final Authentication authentication = AuthenticationTestHelper.builder().build(); + + RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("inference_admin"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role role = Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); + assertTrue(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/stop", request, authentication)); + assertFalse(role.runAs().check(randomAlphaOfLengthBetween(1, 30))); + assertNoAccessAllowed(role, ".inference"); + } + + public void testInferenceUserRole() { + final TransportRequest request = mock(TransportRequest.class); + final Authentication authentication = AuthenticationTestHelper.builder().build(); + + RoleDescriptor roleDescriptor = ReservedRolesStore.roleDescriptor("inference_user"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role role = 
Role.buildFromRoleDescriptor(roleDescriptor, new FieldPermissionsCache(Settings.EMPTY), RESTRICTED_INDICES); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/inference/get", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); + assertTrue(role.cluster().check("cluster:monitor/xpack/ml/trained_models/deployment/infer", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/start", request, authentication)); + assertFalse(role.cluster().check("cluster:admin/xpack/ml/trained_models/deployment/stop", request, authentication)); + assertFalse(role.runAs().check(randomAlphaOfLengthBetween(1, 30))); + assertNoAccessAllowed(role, ".inference"); + } + private IndexAbstraction mockIndexAbstraction(String name) { IndexAbstraction mock = mock(IndexAbstraction.class); when(mock.getName()).thenReturn(name); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java new file mode 100644 index 0000000000000..cc8096831f598 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferencePermissionsIT.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.apache.http.HttpHost; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class InferencePermissionsIT extends ESRestTestCase { + + private static final String PASSWORD = "secret-test-password"; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("inference-service-test") + .user("x_pack_rest_user", "x-pack-test-password") + .user("test_inference_admin", PASSWORD, "inference_admin", false) + .user("test_inference_user", PASSWORD, "inference_user", false) + .user("test_no_privileged", PASSWORD, "", false) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + // use the privileged users here but not in the tests + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + public void testPermissions() throws IOException { + var putRequest = new Request("PUT", "_inference/sparse_embedding/permissions_test"); + 
putRequest.setJsonEntity(InferenceBaseRestTest.mockSparseServiceModelConfig()); + var getAllRequest = new Request("GET", "_inference/sparse_embedding/_all"); + var deleteRequest = new Request("DELETE", "_inference/sparse_embedding/permissions_test"); + + var putModelForTestingInference = new Request("PUT", "_inference/sparse_embedding/model_to_test_user_priv"); + putModelForTestingInference.setJsonEntity(InferenceBaseRestTest.mockSparseServiceModelConfig()); + + var inferRequest = new Request("POST", "_inference/sparse_embedding/model_to_test_user_priv"); + var bodyBuilder = new StringBuilder("{\"input\": ["); + for (var in : new String[] { "foo", "bar" }) { + bodyBuilder.append('"').append(in).append('"').append(','); + } + // remove last comma + bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); + bodyBuilder.append("]}"); + inferRequest.setJsonEntity(bodyBuilder.toString()); + + var deleteInferenceModel = new Request("DELETE", "_inference/sparse_embedding/model_to_test_user_priv"); + + try (RestClient inferenceAdminClient = buildClient(inferenceAdminClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(inferenceAdminClient, putRequest, true); + makeRequest(inferenceAdminClient, getAllRequest, true); + makeRequest(inferenceAdminClient, deleteRequest, true); + // create a model now as the other clients don't have the privilege to do so + makeRequest(inferenceAdminClient, putModelForTestingInference, true); + makeRequest(inferenceAdminClient, inferRequest, true); + } + + try (RestClient inferenceUserClient = buildClient(inferenceUserClientSettings(), getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(inferenceUserClient, putRequest, false); + makeRequest(inferenceUserClient, getAllRequest, true); + makeRequest(inferenceUserClient, inferRequest, true); + makeRequest(inferenceUserClient, deleteInferenceModel, false); + } + + try (RestClient unprivilegedClient = buildClient(unprivilegedUserClientSettings(), 
getClusterHosts().toArray(new HttpHost[0]))) { + makeRequest(unprivilegedClient, putRequest, false); + makeRequest(unprivilegedClient, getAllRequest, false); + makeRequest(unprivilegedClient, inferRequest, false); + makeRequest(unprivilegedClient, deleteInferenceModel, false); + } + } + + private Settings inferenceAdminClientSettings() { + String token = basicAuthHeaderValue("test_inference_admin", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private Settings inferenceUserClientSettings() { + String token = basicAuthHeaderValue("test_inference_user", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + private Settings unprivilegedUserClientSettings() { + String token = basicAuthHeaderValue("test_no_privileged", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + /* + * This makes the given request with the given client. It asserts a 200 response if expectSuccess is true, and asserts an exception + * with a 403 response if expectSuccess is false.
+ */ + private void makeRequest(RestClient client, Request request, boolean expectSuccess) throws IOException { + if (expectSuccess) { + Response response = client.performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + } else { + ResponseException exception = expectThrows(ResponseException.class, () -> client.performRequest(request)); + assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.FORBIDDEN.getStatus())); + } + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml index 319b84e855aaf..7e65691ea17dd 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/privileges/11_builtin.yml @@ -15,5 +15,5 @@ setup: # This is fragile - it needs to be updated every time we add a new cluster/index privilege # I would much prefer we could just check that specific entries are in the array, but we don't have # an assertion for that - - length: { "cluster" : 55 } + - length: { "cluster" : 57 } - length: { "index" : 22 } From 9315f8022dcdcf5978fa1213ac8af4d97ca56178 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Wed, 20 Mar 2024 13:20:54 +0100 Subject: [PATCH 059/214] Enhance search tier GC options (#106526) For small nodes, we need a bit more wiggle room for new size and concurrent GC threads in order to stay below real memory circuit breaker limits on dedicated search nodes. 
ES-8087 --- .../server/cli/JvmErgonomics.java | 24 +++++- .../server/cli/JvmOptionsParser.java | 2 +- .../server/cli/JvmErgonomicsTests.java | 82 +++++++++++++++++-- docs/changelog/106526.yaml | 5 ++ 4 files changed, 104 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/106526.yaml diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java index 926d5727a1b4a..ec39c0fc89ac2 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmErgonomics.java @@ -8,6 +8,12 @@ package org.elasticsearch.server.cli; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.node.NodeRoleSettings; + import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -31,7 +37,7 @@ private JvmErgonomics() { * @param userDefinedJvmOptions A list of JVM options that have been defined by the user. * @return A list of additional JVM options to set. 
*/ - static List choose(final List userDefinedJvmOptions) throws InterruptedException, IOException { + static List choose(final List userDefinedJvmOptions, Settings nodeSettings) throws InterruptedException, IOException { final List ergonomicChoices = new ArrayList<>(); final Map finalJvmOptions = JvmOption.findFinalOptions(userDefinedJvmOptions); final long heapSize = JvmOption.extractMaxHeapSize(finalJvmOptions); @@ -55,6 +61,22 @@ static List choose(final List userDefinedJvmOptions) throws Inte ergonomicChoices.add("-XX:G1ReservePercent=" + tuneG1GCReservePercent); } + boolean isSearchTier = NodeRoleSettings.NODE_ROLES_SETTING.get(nodeSettings).contains(DiscoveryNodeRole.SEARCH_ROLE); + // override new percentage on small heaps on search tier to increase chance of staying free of the real memory circuit breaker limit + if (isSearchTier && heapSize < ByteSizeUnit.GB.toBytes(5)) { + ergonomicChoices.add("-XX:+UnlockExperimentalVMOptions"); + ergonomicChoices.add("-XX:G1NewSizePercent=10"); + } + + // for dedicated search, using just 1 conc gc thread is not always enough to keep us below real memory breaker limit + // jvm uses (2+processors) / 4 (for small processor counts), so only affects 4/5 processors (for now) + if (EsExecutors.NODE_PROCESSORS_SETTING.exists(nodeSettings)) { + int allocated = EsExecutors.allocatedProcessors(nodeSettings); + if (allocated >= 4 && allocated <= 5 && isSearchTier) { + ergonomicChoices.add("-XX:ConcGCThreads=2"); + } + } + return ergonomicChoices; } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index d312fae4456f1..35f3f62122f0c 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -139,7 +139,7 @@ private List
jvmOptions( new OverridableSystemMemoryInfo(substitutedJvmOptions, new DefaultSystemMemoryInfo()) ); substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); - final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); + final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions, args.nodeSettings()); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(args.nodeSettings(), cliSysprops); final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), args.secrets(), args.logsDir(), tmpDir); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java index 0d4edfc384d46..c94aa0d9f68af 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java @@ -9,6 +9,10 @@ package org.elasticsearch.server.cli; import org.apache.lucene.tests.util.LuceneTestCase.SuppressFileSystems; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.node.NodeRoleSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; @@ -26,6 +30,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; @@ -106,7 +111,7 @@ public void testExtractSystemProperties() { } public void testG1GOptionsForSmallHeap() throws Exception { - List jvmErgonomics = 
JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC")); + List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC"), Settings.EMPTY); assertThat(jvmErgonomics, hasItem("-XX:G1HeapRegionSize=4m")); assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30")); assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=15")); @@ -114,7 +119,8 @@ public void testG1GOptionsForSmallHeap() throws Exception { public void testG1GOptionsForSmallHeapWhenTuningSet() throws Exception { List jvmErgonomics = JvmErgonomics.choose( - Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC", "-XX:G1HeapRegionSize=4m", "-XX:InitiatingHeapOccupancyPercent=45") + Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseG1GC", "-XX:G1HeapRegionSize=4m", "-XX:InitiatingHeapOccupancyPercent=45"), + Settings.EMPTY ); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); @@ -122,14 +128,14 @@ public void testG1GOptionsForSmallHeapWhenTuningSet() throws Exception { } public void testG1GOptionsForLargeHeap() throws Exception { - List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC")); + List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC"), Settings.EMPTY); assertThat(jvmErgonomics, hasItem("-XX:InitiatingHeapOccupancyPercent=30")); assertThat(jvmErgonomics, hasItem("-XX:G1ReservePercent=25")); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); } public void testG1GOptionsForSmallHeapWhenOtherGCSet() throws Exception { - List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseParallelGC")); + List jvmErgonomics = JvmErgonomics.choose(Arrays.asList("-Xms6g", "-Xmx6g", "-XX:+UseParallelGC"), Settings.EMPTY); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1HeapRegionSize=")))); 
assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent=")))); @@ -137,7 +143,8 @@ public void testG1GOptionsForSmallHeapWhenOtherGCSet() throws Exception { public void testG1GOptionsForLargeHeapWhenTuningSet() throws Exception { List jvmErgonomics = JvmErgonomics.choose( - Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC", "-XX:InitiatingHeapOccupancyPercent=60", "-XX:G1ReservePercent=10") + Arrays.asList("-Xms8g", "-Xmx8g", "-XX:+UseG1GC", "-XX:InitiatingHeapOccupancyPercent=60", "-XX:G1ReservePercent=10"), + Settings.EMPTY ); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:InitiatingHeapOccupancyPercent=")))); assertThat(jvmErgonomics, everyItem(not(startsWith("-XX:G1ReservePercent=")))); @@ -168,18 +175,79 @@ public void testMaxDirectMemorySizeChoice() throws Exception { ); final String heapSize = randomFrom(heapMaxDirectMemorySize.keySet().toArray(String[]::new)); assertThat( - JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize)), + JvmErgonomics.choose(Arrays.asList("-Xms" + heapSize, "-Xmx" + heapSize), Settings.EMPTY), hasItem("-XX:MaxDirectMemorySize=" + heapMaxDirectMemorySize.get(heapSize)) ); } public void testMaxDirectMemorySizeChoiceWhenSet() throws Exception { assertThat( - JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g")), + JvmErgonomics.choose(Arrays.asList("-Xms1g", "-Xmx1g", "-XX:MaxDirectMemorySize=1g"), Settings.EMPTY), everyItem(not(startsWith("-XX:MaxDirectMemorySize="))) ); } + public void testConcGCThreadsNotSetBasedOnProcessors() throws Exception { + Settings.Builder nodeSettingsBuilder = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()); + if (randomBoolean()) { + nodeSettingsBuilder.put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), randomBoolean() ? 
between(1, 3) : between(6, 100)); + } + assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); + } + + public void testConcGCThreadsNotSetBasedOnRoles() throws Exception { + Settings.Builder nodeSettingsBuilder = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5)); + if (randomBoolean()) { + nodeSettingsBuilder.put( + NodeRoleSettings.NODE_ROLES_SETTING.getKey(), + randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() + ); + } + assertThat(JvmErgonomics.choose(List.of(), nodeSettingsBuilder.build()), everyItem(not(startsWith("-XX:ConcGCThreads=")))); + + } + + public void testConcGCThreadsSet() throws Exception { + Settings nodeSettings = Settings.builder() + .put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), between(4, 5)) + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + assertThat(JvmErgonomics.choose(List.of(), nodeSettings), hasItem("-XX:ConcGCThreads=2")); + } + + public void testMinimumNewSizeNotSetBasedOnHeap() throws Exception { + Settings nodeSettings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(5, 31) + "g"), nodeSettings); + assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions")))); + assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent=")))); + } + + public void testMinimumNewSizeNotSetBasedOnRoles() throws Exception { + Settings nodeSettings = randomBoolean() + ? 
Settings.EMPTY + : Settings.builder() + .put( + NodeRoleSettings.NODE_ROLES_SETTING.getKey(), + randomValueOtherThan(DiscoveryNodeRole.SEARCH_ROLE, () -> randomFrom(DiscoveryNodeRole.roles())).roleName() + ) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings); + assertThat(chosen, everyItem(not(is("-XX:+UnlockExperimentalVMOptions")))); + assertThat(chosen, everyItem(not(startsWith("-XX:G1NewSizePercent=")))); + } + + public void testMinimumNewSizeSet() throws Exception { + Settings nodeSettings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.SEARCH_ROLE.roleName()) + .build(); + List chosen = JvmErgonomics.choose(List.of("-Xmx" + between(1, 4) + "g"), nodeSettings); + assertThat(chosen, hasItem("-XX:+UnlockExperimentalVMOptions")); + assertThat(chosen, hasItem("-XX:G1NewSizePercent=10")); + } + @SuppressWarnings("ConstantConditions") public void testMissingOptionHandling() { final Map g1GcOn = Map.of("UseG1GC", new JvmOption("true", "")); diff --git a/docs/changelog/106526.yaml b/docs/changelog/106526.yaml new file mode 100644 index 0000000000000..ac98454b5d8b4 --- /dev/null +++ b/docs/changelog/106526.yaml @@ -0,0 +1,5 @@ +pr: 106526 +summary: Enhance search tier GC options +area: Infra/CLI +type: enhancement +issues: [] From 82221ffcea12982180e40c5a83449097d520e5b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Wed, 20 Mar 2024 13:35:05 +0100 Subject: [PATCH 060/214] Ensure ILM policy is installed before starting the tests. 
(#106523) --- .../application/FullClusterRestartIT.java | 41 ++++++++++++++++--- 1 file changed, 35 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index 95b3b576eb46c..e01b8de941851 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -66,10 +66,10 @@ public void testBehavioralAnalyticsDataRetention() throws Exception { String legacyAnalyticsCollectionName = "oldstuff"; String newAnalyticsCollectionName = "newstuff"; - if (isRunningAgainstOldCluster()) { - // Ensure index template is installed before executing the tests. - assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME)); + // Wait for the cluster to finish initialization + waitForClusterReady(); + if (isRunningAgainstOldCluster()) { // Create an analytics collection Request legacyPutRequest = new Request("PUT", "_application/analytics/" + legacyAnalyticsCollectionName); assertOK(client().performRequest(legacyPutRequest)); @@ -77,9 +77,6 @@ public void testBehavioralAnalyticsDataRetention() throws Exception { // Validate that ILM lifecycle is in place assertBusy(() -> assertUsingLegacyDataRetentionPolicy(legacyAnalyticsCollectionName)); } else { - // Ensure index template is updated to version 3 before executing the tests. 
- assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME, DSL_REGISTRY_VERSION)); - // Create a new analytics collection Request putRequest = new Request("PUT", "_application/analytics/" + newAnalyticsCollectionName); assertOK(client().performRequest(putRequest)); @@ -129,6 +126,21 @@ private void assertUsingDslDataRetention(String analyticsCollectionName) throws assertTrue(evaluatedNewDataStream); } + private void waitForClusterReady() throws Exception { + // Ensure index template is installed with the right version before executing the tests. + if (isRunningAgainstOldCluster()) { + // No minimum version of the registry required when running on old clusters. + assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME)); + + // When running on old cluster, wait for the ILM policy to be installed. + assertBusy(() -> assertILMPolicyExists(EVENT_DATA_STREAM_LEGACY_ILM_POLICY_NAME)); + } else { + // DSL has been introduced with the version 3 of the registry. + // Wait for this version to be deployed. 
+ assertBusy(() -> assertDataStreamTemplateExists(EVENT_DATA_STREAM_LEGACY_TEMPLATE_NAME, DSL_REGISTRY_VERSION)); + } + } + private void assertDataStreamTemplateExists(String templateName) throws IOException { assertDataStreamTemplateExists(templateName, null); } @@ -138,6 +150,7 @@ private void assertDataStreamTemplateExists(String templateName, Integer minVers Request getIndexTemplateRequest = new Request("GET", "_index_template/" + templateName); Response response = client().performRequest(getIndexTemplateRequest); assertOK(response); + if (minVersion != null) { String pathToVersion = "index_templates.0.index_template.version"; ObjectPath indexTemplatesResponse = ObjectPath.createFromResponse(response); @@ -151,4 +164,20 @@ private void assertDataStreamTemplateExists(String templateName, Integer minVers throw e; } } + + private void assertILMPolicyExists(String policyName) throws IOException { + try { + Request getILMPolicyRequest = new Request("GET", "_ilm/policy/" + policyName); + Response response = client().performRequest(getILMPolicyRequest); + assertOK(response); + + assertNotNull(ObjectPath.createFromResponse(response).evaluate(policyName)); + } catch (ResponseException e) { + int status = e.getResponse().getStatusLine().getStatusCode(); + if (status == 404) { + throw new AssertionError("Waiting for the policy to be created"); + } + throw e; + } + } } From cf5fbfc857b36438baf357a31a7c2bc960111797 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 13:28:59 +0000 Subject: [PATCH 061/214] Force execution of `SearchService.Reaper` (#106544) If the search threadpool fills up then we may reject execution of `SearchService.Reaper` which means it stops retrying. We must instead force its execution so that it keeps on going. 
With #106542, closes #106543 --- docs/changelog/106544.yaml | 6 +++++ .../elasticsearch/search/SearchService.java | 27 +++++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/106544.yaml diff --git a/docs/changelog/106544.yaml b/docs/changelog/106544.yaml new file mode 100644 index 0000000000000..6557ba478126d --- /dev/null +++ b/docs/changelog/106544.yaml @@ -0,0 +1,6 @@ +pr: 106544 +summary: Force execution of `SearchService.Reaper` +area: Search +type: bug +issues: + - 106543 diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 70a002d676235..d11a5b9908514 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -40,8 +40,10 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -1578,9 +1580,9 @@ void addResultsObject(SearchContext context) { abstract void addResultsObject(SearchContext context); } - class Reaper implements Runnable { + class Reaper extends AbstractRunnable { @Override - public void run() { + protected void doRun() { assert Transports.assertNotTransportThread("closing contexts may do IO, e.g. deleting dangling files") && ThreadPool.assertNotScheduleThread("closing contexts may do IO, e.g. 
deleting dangling files"); for (ReaderContext context : activeReaders.values()) { @@ -1590,6 +1592,27 @@ public void run() { } } } + + @Override + public void onFailure(Exception e) { + logger.error("unexpected error when freeing search contexts", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + if (e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown()) { + logger.debug("rejected execution when freeing search contexts"); + } else { + onFailure(e); + } + } + + @Override + public boolean isForceExecution() { + // mustn't reject this task even if the queue is full + return true; + } } public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { From 2e8482afd6e1296a2bdb1eca43321a4a18ad5cd0 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 13:47:14 +0000 Subject: [PATCH 062/214] Integrate threadpool scheduling with `AbstractRunnable` (#106542) Today `ThreadPool#scheduleWithFixedDelay` does not interact as expected with `AbstractRunnable`: if the task fails or is rejected then this isn't passed back to the relevant callback, and the task cannot specify that it should be force-executed. This commit fixes that. --- .../elasticsearch/threadpool/Scheduler.java | 23 ++- .../elasticsearch/threadpool/ThreadPool.java | 13 +- .../threadpool/ThreadPoolTests.java | 154 ++++++++++++++++++ 3 files changed, 182 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 438a32a2b8630..33f3f6d631455 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -107,7 +107,7 @@ static boolean awaitTermination( * not be interrupted. 
*/ default Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, Executor executor) { - var runnable = new ReschedulingRunnable(command, interval, executor, this, (e) -> {}, (e) -> {}); + var runnable = new ReschedulingRunnable(command, interval, executor, this, e -> {}, e -> {}); runnable.start(); return runnable; } @@ -226,13 +226,25 @@ public void doRun() { @Override public void onFailure(Exception e) { - failureConsumer.accept(e); + try { + if (runnable instanceof AbstractRunnable abstractRunnable) { + abstractRunnable.onFailure(e); + } + } finally { + failureConsumer.accept(e); + } } @Override public void onRejection(Exception e) { run = false; - rejectionConsumer.accept(e); + try { + if (runnable instanceof AbstractRunnable abstractRunnable) { + abstractRunnable.onRejection(e); + } + } finally { + rejectionConsumer.accept(e); + } } @Override @@ -247,6 +259,11 @@ public void onAfter() { } } + @Override + public boolean isForceExecution() { + return runnable instanceof AbstractRunnable abstractRunnable && abstractRunnable.isForceExecution(); + } + @Override public String toString() { return "ReschedulingRunnable{" + "runnable=" + runnable + ", interval=" + interval + '}'; diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 97c9ce755c130..cf554fe81d4a3 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -597,11 +597,14 @@ public void scheduleUnlessShuttingDown(TimeValue delay, Executor executor, Runna } public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, Executor executor) { - var runnable = new ReschedulingRunnable(command, interval, executor, this, (e) -> { - if (logger.isDebugEnabled()) { - logger.debug(() -> format("scheduled task [%s] was rejected on thread pool [%s]", command, executor), e); - } - }, (e) -> 
logger.warn(() -> format("failed to run scheduled task [%s] on thread pool [%s]", command, executor), e)); + var runnable = new ReschedulingRunnable( + command, + interval, + executor, + this, + e -> logger.debug(() -> format("scheduled task [%s] was rejected on thread pool [%s]", command, executor), e), + e -> logger.warn(() -> format("failed to run scheduled task [%s] on thread pool [%s]", command, executor), e) + ); runnable.start(); return runnable; } diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index baa03ddf1abcd..c6f3d752d103c 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -11,10 +11,14 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.TaskExecutionTimeTrackingEsThreadPoolExecutor; import org.elasticsearch.core.TimeValue; @@ -25,7 +29,10 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedTransferQueue; import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import static org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DEFAULT; +import static 
org.elasticsearch.common.util.concurrent.EsExecutors.TaskTrackingConfig.DO_NOT_TRACK; import static org.elasticsearch.threadpool.ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING; import static org.elasticsearch.threadpool.ThreadPool.LATE_TIME_INTERVAL_WARN_THRESHOLD_SETTING; import static org.elasticsearch.threadpool.ThreadPool.assertCurrentMethodIsNotCalledRecursively; @@ -405,4 +412,151 @@ public void testSearchWorkedThreadPool() { assertTrue(terminate(threadPool)); } } + + public void testScheduledOneShotRejection() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + threadPool.schedule( + ActionRunnable.run(future, () -> fail("should not execute")), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future, 10, TimeUnit.SECONDS)); + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledOneShotForceExecution() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + threadPool.schedule( + forceExecution(ActionRunnable.run(future, () -> {})), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + Thread.yield(); + assertFalse(future.isDone()); + + latch.countDown(); + FutureUtils.get(future, 10, TimeUnit.SECONDS); // shouldn't throw + } finally { + 
latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledFixedDelayRejection() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + threadPool.scheduleWithFixedDelay( + ActionRunnable.wrap(future, ignored -> Thread.yield()), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + while (future.isDone() == false) { + // might not block all threads the first time round if the scheduled runnable is running, so must keep trying + blockExecution(threadPool.executor(name), latch); + } + expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future)); + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + public void testScheduledFixedDelayForceExecution() { + final var name = "fixed-bounded"; + final var threadPool = new TestThreadPool( + getTestName(), + new FixedExecutorBuilder(Settings.EMPTY, name, between(1, 5), between(1, 5), randomFrom(DEFAULT, DO_NOT_TRACK)) + ); + + final var future = new PlainActionFuture(); + final var latch = new CountDownLatch(1); + try { + blockExecution(threadPool.executor(name), latch); + + threadPool.scheduleWithFixedDelay( + forceExecution(ActionRunnable.run(future, Thread::yield)), + TimeValue.timeValueMillis(between(1, 100)), + threadPool.executor(name) + ); + + assertFalse(future.isDone()); + + latch.countDown(); + FutureUtils.get(future, 10, TimeUnit.SECONDS); // shouldn't throw + } finally { + latch.countDown(); + assertTrue(terminate(threadPool)); + } + } + + private static AbstractRunnable forceExecution(AbstractRunnable delegate) { + return new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + delegate.onFailure(e); + } + + @Override + 
protected void doRun() { + delegate.run(); + } + + @Override + public void onRejection(Exception e) { + delegate.onRejection(e); + } + + @Override + public void onAfter() { + delegate.onAfter(); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + } + + private static void blockExecution(ExecutorService executor, CountDownLatch latch) { + while (true) { + try { + executor.execute(() -> safeAwait(latch)); + } catch (EsRejectedExecutionException e) { + break; + } + } + } + } From 61e813445af62957a5029bbf8c36082d6bdb6d5e Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Wed, 20 Mar 2024 14:58:15 +0100 Subject: [PATCH 063/214] Unmute SimpleThreadPoolIT.testThreadPoolMetrics with additional logging (#106522) Add additional logging and more vocal assertions to better understand test failures (related to #104652) --- .../threadpool/SimpleThreadPoolIT.java | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index c9c648e57169a..d987db5b5466f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -28,12 +28,19 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.function.Function; import java.util.regex.Pattern; +import static java.util.function.Function.identity; +import static org.elasticsearch.common.util.Maps.toUnmodifiableSortedMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesRegex; @@ -111,7 +118,8 @@ public void testThreadNames() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104652") + // temporarily re-enable to gather more data on test failures likely caused by diverging thread pool stats + // at the time stats are collected vs when measurements are taken. public void testThreadPoolMetrics() throws Exception { internalCluster().startNode(); @@ -147,30 +155,36 @@ public void testThreadPoolMetrics() throws Exception { plugin.collect(); ArrayList registeredMetrics = plugin.getRegisteredMetrics(InstrumentType.LONG_GAUGE); registeredMetrics.addAll(plugin.getRegisteredMetrics(InstrumentType.LONG_ASYNC_COUNTER)); + tps.forEach(stats -> { - Map threadPoolMetrics = Map.of( - ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, - stats.completed(), - ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, - (long) stats.active(), - ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, - (long) stats.threads(), - ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, - (long) stats.largest(), - ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, - (long) stats.queue() - ); - threadPoolMetrics.forEach((suffix, value) -> { - String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + stats.name() + suffix; - List measurements; - if (suffix.equals(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED)) { - measurements = plugin.getLongAsyncCounterMeasurement(metricName); - } else { - measurements = plugin.getLongGaugeMeasurement(metricName); - } + Map threadPoolStats = List.of( + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, stats.completed()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, (long) stats.active()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, (long) stats.threads()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, (long) 
stats.largest()), + Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, (long) stats.queue()) + ).stream().collect(toUnmodifiableSortedMap(Entry::getKey, Entry::getValue)); + + Function> measurementExtractor = name -> { + String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + stats.name() + name; assertThat(metricName, in(registeredMetrics)); - assertThat(measurements.get(0).getLong(), greaterThanOrEqualTo(value)); - }); + + List measurements = name.equals(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED) + ? plugin.getLongAsyncCounterMeasurement(metricName) + : plugin.getLongGaugeMeasurement(metricName); + return measurements.stream().map(Measurement::getLong).toList(); + }; + + Map> measurements = threadPoolStats.keySet() + .stream() + .collect(toUnmodifiableSortedMap(identity(), measurementExtractor)); + + logger.info("Stats of `{}`: {}", stats.name(), threadPoolStats); + logger.info("Measurements of `{}`: {}", stats.name(), measurements); + + threadPoolStats.forEach( + (metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(greaterThanOrEqualTo(value)))) + ); }); } From 09b6072afd8a8f89f53e8f1211c95f0f3637e84e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Wed, 20 Mar 2024 15:07:41 +0100 Subject: [PATCH 064/214] [DOCS] Changes Cohere inference examples in tutorial and API docs (#106524) --- docs/reference/inference/put-inference.asciidoc | 5 ++--- .../tab-widgets/inference-api/infer-api-task.asciidoc | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 399a0594987d2..0aeb478dcafd2 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -101,6 +101,7 @@ the same name and the updated API key. (Optional, string) Specifies the types of embeddings you want to get back. Defaults to `float`. 
Valid values are: + * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). * `float`: use it for the default float embeddings. * `int8`: use it for signed int8 embeddings. @@ -257,9 +258,7 @@ PUT _inference/text_embedding/cohere-embeddings "service_settings": { "api_key": "", "model": "embed-english-light-v3.0", - "embedding_type": "int8" - }, - "task_settings": { + "embedding_type": "byte" } } ------------------------------------------------------------ diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc index be0319fcf1ec1..b39554bb2c83f 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -8,9 +8,7 @@ PUT _inference/text_embedding/cohere_embeddings <1> "service_settings": { "api_key": "", <2> "model_id": "embed-english-v3.0", <3> - "embedding_type": "int8" - }, - "task_settings": { + "embedding_type": "byte" } } ------------------------------------------------------------ From 5d05d8185450a0d741ce688673e05f0eb9360d97 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 20 Mar 2024 10:15:49 -0400 Subject: [PATCH 065/214] [ES|QL] Remove variadic functions' optional args from the output of show functions (#106454) * remove optional args from the output of show functions optionalArgs, argNames and argTypes for variadic functions * consistent names for arguments --- .../esql/functions/signature/concat.svg | 2 +- .../esql/functions/types/case.asciidoc | 2 +- .../esql/functions/types/concat.asciidoc | 2 +- .../src/main/resources/meta.csv-spec | 306 +++++++++--------- .../expression/function/aggregate/Avg.java | 2 +- .../expression/function/aggregate/Max.java | 2 +- .../expression/function/aggregate/Median.java | 2 +- .../aggregate/MedianAbsoluteDeviation.java | 2 +- .../expression/function/aggregate/Min.java | 
2 +- .../function/aggregate/Percentile.java | 2 +- .../expression/function/aggregate/Sum.java | 2 +- .../function/scalar/conditional/Case.java | 2 +- .../function/scalar/conditional/Greatest.java | 3 +- .../function/scalar/conditional/Least.java | 3 +- .../function/scalar/convert/ToBoolean.java | 2 +- .../scalar/convert/ToCartesianPoint.java | 2 +- .../scalar/convert/ToCartesianShape.java | 2 +- .../function/scalar/convert/ToDatetime.java | 2 +- .../function/scalar/convert/ToDegrees.java | 2 +- .../function/scalar/convert/ToDouble.java | 5 +- .../function/scalar/convert/ToGeoPoint.java | 2 +- .../function/scalar/convert/ToGeoShape.java | 2 +- .../function/scalar/convert/ToIP.java | 2 +- .../function/scalar/convert/ToInteger.java | 5 +- .../function/scalar/convert/ToLong.java | 5 +- .../function/scalar/convert/ToRadians.java | 2 +- .../function/scalar/convert/ToString.java | 2 +- .../scalar/convert/ToUnsignedLong.java | 5 +- .../function/scalar/convert/ToVersion.java | 2 +- .../function/scalar/date/DateExtract.java | 4 +- .../function/scalar/date/DateFormat.java | 2 +- .../expression/function/scalar/math/Abs.java | 2 +- .../expression/function/scalar/math/Acos.java | 6 +- .../expression/function/scalar/math/Asin.java | 6 +- .../expression/function/scalar/math/Atan.java | 2 +- .../function/scalar/math/Atan2.java | 4 +- .../expression/function/scalar/math/Ceil.java | 2 +- .../expression/function/scalar/math/Cos.java | 2 +- .../expression/function/scalar/math/Cosh.java | 2 +- .../function/scalar/math/Floor.java | 2 +- .../expression/function/scalar/math/Log.java | 4 +- .../function/scalar/math/Log10.java | 2 +- .../function/scalar/math/Round.java | 2 +- .../expression/function/scalar/math/Sin.java | 2 +- .../expression/function/scalar/math/Sinh.java | 2 +- .../expression/function/scalar/math/Sqrt.java | 2 +- .../expression/function/scalar/math/Tan.java | 2 +- .../expression/function/scalar/math/Tanh.java | 2 +- .../function/scalar/multivalue/MvAvg.java | 2 +- 
.../function/scalar/multivalue/MvConcat.java | 2 +- .../function/scalar/multivalue/MvCount.java | 2 +- .../function/scalar/multivalue/MvDedupe.java | 5 +- .../function/scalar/multivalue/MvFirst.java | 2 +- .../function/scalar/multivalue/MvLast.java | 2 +- .../function/scalar/multivalue/MvMax.java | 2 +- .../function/scalar/multivalue/MvMedian.java | 2 +- .../function/scalar/multivalue/MvMin.java | 2 +- .../function/scalar/multivalue/MvSlice.java | 2 +- .../function/scalar/multivalue/MvSum.java | 2 +- .../function/scalar/multivalue/MvZip.java | 4 +- .../function/scalar/nulls/Coalesce.java | 7 +- .../function/scalar/string/Concat.java | 4 +- .../function/scalar/string/LTrim.java | 2 +- .../function/scalar/string/Left.java | 4 +- .../function/scalar/string/Length.java | 2 +- .../function/scalar/string/RTrim.java | 2 +- .../function/scalar/string/Replace.java | 4 +- .../function/scalar/string/Right.java | 2 +- .../function/scalar/string/Split.java | 2 +- .../function/scalar/string/Substring.java | 2 +- .../function/scalar/string/Trim.java | 2 +- .../esql/plan/logical/meta/MetaFunctions.java | 13 +- 72 files changed, 267 insertions(+), 236 deletions(-) diff --git a/docs/reference/esql/functions/signature/concat.svg b/docs/reference/esql/functions/signature/concat.svg index 3ad2ae37b11c3..fec245895aa08 100644 --- a/docs/reference/esql/functions/signature/concat.svg +++ b/docs/reference/esql/functions/signature/concat.svg @@ -1 +1 @@ -CONCAT(first,rest) \ No newline at end of file +CONCAT(string1,string2) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 4190128d49b93..0edfc4f5de29b 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -4,6 +4,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -condition | rest | result +condition | value | result |=== diff --git 
a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index d3d559cf9036e..238351b49e2c8 100644 --- a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -first | rest | result +string1 | string2 | result keyword | keyword | keyword text | text | keyword |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index a08cc92ef238f..850e573e84fc2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -3,102 +3,102 @@ metaFunctions#[skip:-8.13.99] meta functions; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." | false | false | false -asin |"double asin(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false -atan |"double atan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." 
| false | false | false -atan2 |"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" |[y, x] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false +abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." | false | false | false +asin |"double asin(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false +atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." | false | false | false +atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." 
| [false, false] | false | false auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false -avg |"double avg(field:double|integer|long)" |field |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true -case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, rest] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false -ceil |"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false +avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." 
| false | false | true +case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false +ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false -coalesce |"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" |[expression, expressionX] |["boolean|text|integer|keyword|long", "boolean|text|integer|keyword|long"] |["Expression to evaluate", "Other expression to evaluate"] |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | [false, false] | true | false -concat |"keyword concat(first:keyword|text, rest...:keyword|text)" |[first, rest] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." 
| [false, false] | true | false -cos |"double cos(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false -cosh |"double cosh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false +concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false +cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false +cosh |"double cosh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." 
| true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |long date_extract(date_part:keyword, field:date) |[date_part, field] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |keyword date_format(?format:keyword, date:date) |[format, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false +date_extract |long date_extract(datePart:keyword, date:date) |[datePart, date] |[keyword, date] |["Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |keyword date_format(?dateFormat:keyword, date:date) |[dateFormat, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false e |double e() | null | null | null |double | "Euler’s number." | null | false | false ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false -floor |"double|integer|long|unsigned_long floor(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." 
| false | false | false -greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | [false, false] | true | false -least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false -left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["The string from which to return a substring.", "The number of characters to return."] |keyword | "Returns the substring that extracts 'length' chars from 'str' starting from the left." | [false, false] | false | false -length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false -log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." | [true, false] | false | false -log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." 
| false | false | false -ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false -max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true -median |"double|integer|long median(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." | false | false | true -median_absolute_deviation|"double|integer|long median_absolute_deviation(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true -min |"double|integer|long min(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true -mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false -mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false -mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." 
| false | false | false -mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false -mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false -mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." 
| false | false | false -mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false -mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false -mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false -mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[v, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." 
| [false, false, true] | false | false +floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." | false | false | false +greatest |"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the maximum value from many columns." | false | true | false +least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" | first |"integer|long|double|boolean|keyword|text|ip|version" |"" |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | false | true | false +left |"keyword left(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["The string from which to return a substring.", "The number of characters to return."] |keyword | "Returns the substring that extracts 'length' chars from 'string' starting from the left." | [false, false] | false | false +length |"integer length(string:keyword|text)" |string |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false +log |"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a number to a base." | [true, false] | false | false +log10 |"double log10(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." 
| false | false | false +ltrim |"keyword|text ltrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false +max |"double|integer|long max(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true +median |"double|integer|long median(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The value that is greater than half of all values and less than half of all values." | false | false | true +median_absolute_deviation|"double|integer|long median_absolute_deviation(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The median absolute deviation, a measure of variability." | false | false | true +min |"double|integer|long min(number:double|integer|long)" |number |"double|integer|long" | "" |"double|integer|long" | "The minimum value of a numeric field." | false | false | true +mv_avg |"double mv_avg(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false +mv_concat |"keyword mv_concat(string:text|keyword, delim:text|keyword)" |[string, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false +mv_count |"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." 
| false | false | false +mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." | false | false | false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." 
| false | false | false +mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false +mv_median |"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false +mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |field | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false | false +mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" |[field, start, end] | "[boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, integer, integer]" | "[A multivalued field, start index, end index (included)]" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" | "Returns a subset of the multivalued field using the start and end index values." 
| [false, false, true] | false | false mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" | [field, order] | ["boolean|date|double|integer|ip|keyword|long|text|version", "keyword"] | ["A multivalued field", "sort order"] |"boolean|date|double|integer|ip|keyword|long|text|version" | "Sorts a multivalued field in lexicographical order." | [false, true] | false | false -mv_sum |"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false -mv_zip |"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" |[mvLeft, mvRight, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false +mv_sum |"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the sum of all of the values." | false | false | false +mv_zip |"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" |[string1, string2, delim] | ["keyword|text", "keyword|text", "keyword|text"] | [A multivalued field, A multivalued field, delimiter] | "keyword" | "Combines the values from two multivalued fields with a delimiter that joins them together." | [false, false, true] | false | false now |date now() | null |null | null |date | "Returns current date and time." 
| null | false | false -percentile |"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" |[field, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true +percentile |"double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" |[number, percentile] |["double|integer|long, double|integer|long"] |["", ""] |"double|integer|long" | "The value at which a certain percentage of observed values occur." | [false, false] | false | true pi |double pi() | null | null | null |double | "The ratio of a circle’s circumference to its diameter." | null | false | false pow |"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["", ""] |double | "Returns the value of a base raised to the power of an exponent." | [false, false] | false | false -replace |"keyword replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" | [str, regex, newStr] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false -right |"keyword right(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false -round |"double round(value:double, ?decimals:integer)" |[value, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." 
| [false, true] | false | false -rtrim |"keyword|text rtrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false -sinh |"double sinh(n:double|integer|long|unsigned_long)"|n |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false -split |"keyword split(str:keyword|text, delim:keyword|text)" |[str, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false -sqrt |"double sqrt(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false +replace |"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" | [string, regex, newString] | ["keyword|text", "keyword|text", "keyword|text"] |["", "", ""] |keyword | "The function substitutes in the string any match of the regular expression with the replacement string." | [false, false, false]| false | false +right |"keyword right(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false +round |"double round(number:double, ?decimals:integer)" |[number, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." 
| [false, true] | false | false +rtrim |"keyword|text rtrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false +sin |"double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false +sinh |"double sinh(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false +sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." | [false, false] | false | false st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." 
| false | false | false st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false -substring |"keyword substring(str:keyword|text, start:integer, ?length:integer)" |[str, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false -sum |"long sum(field:double|integer|long)" |field |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false -tanh |"double tanh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +substring |"keyword substring(string:keyword|text, start:integer, ?length:integer)" |[string, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false +sum |"long sum(number:double|integer|long)" |number |"double|integer|long" | "" |long | "The sum of a numeric field." 
| false | false | true +tan |"double tan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false +tanh |"double tanh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." | null | false | false -to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false -to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false -to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" |v |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false -to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." 
|false |false | false -to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false -to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false -to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false -to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false -to_geoshape |"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" |v |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false -to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false -to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false -to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." 
|false |false | false +to_bool |"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_boolean |"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" |field |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false +to_cartesianshape |"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" |field |"cartesian_point|cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false +to_datetime |"date to_datetime(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_dbl |"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false +to_degrees |"double to_degrees(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false +to_double |"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." 
|false |false | false +to_dt |"date to_dt(field:date|keyword|text|double|long|unsigned_long|integer)" |field |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false +to_geopoint |"geo_point to_geopoint(field:geo_point|keyword|text)" |field |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_geoshape |"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" |field |"geo_point|geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false +to_int |"integer to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_integer |"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false +to_ip |"ip to_ip(field:ip|keyword|text)" |field |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false +to_long |"long to_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false -to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." 
|false |false | false -to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false -to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_radians |"double to_radians(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." 
|false |false | false +to_str |"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_ul |"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_ulong |"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false +to_unsigned_long |"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false to_upper |"keyword|text to_upper(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to upper case." |false |false | false -to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." 
|false |false | false -to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false -trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false +to_ver |"version to_ver(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +to_version |"version to_version(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false +trim |"keyword|text trim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false ; @@ -106,102 +106,102 @@ metaFunctionsSynopsis#[skip:-8.13.99] meta functions | keep synopsis; synopsis:keyword -"double|integer|long|unsigned_long abs(n:double|integer|long|unsigned_long)" -"double acos(n:double|integer|long|unsigned_long)" -"double asin(n:double|integer|long|unsigned_long)" -"double atan(n:double|integer|long|unsigned_long)" -"double atan2(y:double|integer|long|unsigned_long, x:double|integer|long|unsigned_long)" +"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" +"double acos(number:double|integer|long|unsigned_long)" +"double asin(number:double|integer|long|unsigned_long)" +"double atan(number:double|integer|long|unsigned_long)" +"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" "double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" -"double avg(field:double|integer|long)" -"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, 
rest...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long ceil(n:double|integer|long|unsigned_long)" -boolean cidr_match(ip:ip, blockX...:keyword) -"boolean|text|integer|keyword|long coalesce(expression:boolean|text|integer|keyword|long, expressionX...:boolean|text|integer|keyword|long)" -"keyword concat(first:keyword|text, rest...:keyword|text)" -"double cos(n:double|integer|long|unsigned_long)" -"double cosh(n:double|integer|long|unsigned_long)" +"double avg(number:double|integer|long)" +"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" +boolean cidr_match(ip:ip, blockX...:keyword) +"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" +"keyword concat(string1:keyword|text, string2...:keyword|text)" +"double cos(number:double|integer|long|unsigned_long)" +"double cosh(number:double|integer|long|unsigned_long)" "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -long date_extract(date_part:keyword, field:date) -keyword date_format(?format:keyword, date:date) +long date_extract(datePart:keyword, date:date) +keyword date_format(?dateFormat:keyword, date:date) "date date_parse(?datePattern:keyword, dateString:keyword|text)" "date date_trunc(interval:keyword, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" -"double|integer|long|unsigned_long 
floor(n:double|integer|long|unsigned_long)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"keyword left(str:keyword|text, length:integer)" -"integer length(str:keyword|text)" -"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" -"double log10(n:double|integer|long|unsigned_long)" -"keyword|text ltrim(str:keyword|text)" -"double|integer|long max(field:double|integer|long)" -"double|integer|long median(field:double|integer|long)" -"double|integer|long median_absolute_deviation(field:double|integer|long)" -"double|integer|long min(field:double|integer|long)" -"double mv_avg(field:double|integer|long|unsigned_long)" -"keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" 
-"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" -"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" +"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" +"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"keyword left(string:keyword|text, length:integer)" +"integer length(string:keyword|text)" +"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" +"double log10(number:double|integer|long|unsigned_long)" +"keyword|text ltrim(string:keyword|text)" +"double|integer|long max(number:double|integer|long)" +"double|integer|long median(number:double|integer|long)" +"double|integer|long median_absolute_deviation(number:double|integer|long)" +"double|integer|long min(number:double|integer|long)" +"double mv_avg(number:double|integer|long|unsigned_long)" +"keyword mv_concat(string:text|keyword, delim:text|keyword)" +"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|date|double|integer|ip|keyword|long|text|version)" 
+"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" +"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" "boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" -"double|integer|long|unsigned_long mv_sum(v:double|integer|long|unsigned_long)" -"keyword mv_zip(mvLeft:keyword|text, mvRight:keyword|text, ?delim:keyword|text)" +"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" +"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" date now() -"double|integer|long percentile(field:double|integer|long, percentile:double|integer|long)" +"double|integer|long percentile(number:double|integer|long, percentile:double|integer|long)" double pi() "double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" -"keyword 
replace(str:keyword|text, regex:keyword|text, newStr:keyword|text)" -"keyword right(str:keyword|text, length:integer)" -"double round(value:double, ?decimals:integer)" -"keyword|text rtrim(str:keyword|text)" -"double sin(n:double|integer|long|unsigned_long)" -"double sinh(n:double|integer|long|unsigned_long)" -"keyword split(str:keyword|text, delim:keyword|text)" -"double sqrt(n:double|integer|long|unsigned_long)" +"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" +"keyword right(string:keyword|text, length:integer)" +"double round(number:double, ?decimals:integer)" +"keyword|text rtrim(string:keyword|text)" +"double sin(number:double|integer|long|unsigned_long)" +"double sinh(number:double|integer|long|unsigned_long)" +"keyword split(string:keyword|text, delim:keyword|text)" +"double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" "double st_y(point:geo_point|cartesian_point)" "boolean starts_with(str:keyword|text, prefix:keyword|text)" -"keyword substring(str:keyword|text, start:integer, ?length:integer)" -"long sum(field:double|integer|long)" -"double tan(n:double|integer|long|unsigned_long)" -"double tanh(n:double|integer|long|unsigned_long)" +"keyword substring(string:keyword|text, start:integer, ?length:integer)" +"long sum(number:double|integer|long)" +"double tan(number:double|integer|long|unsigned_long)" +"double tanh(number:double|integer|long|unsigned_long)" double tau() -"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" -"cartesian_shape 
to_cartesianshape(v:cartesian_point|cartesian_shape|keyword|text)" -"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" -"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"double to_degrees(v:double|integer|long|unsigned_long)" -"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(v:geo_point|keyword|text)" -"geo_shape to_geoshape(v:geo_point|geo_shape|keyword|text)" -"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"ip to_ip(v:ip|keyword|text)" -"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" +"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" +"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" +"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" +"date to_datetime(field:date|keyword|text|double|long|unsigned_long|integer)" +"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_degrees(number:double|integer|long|unsigned_long)" +"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"date to_dt(field:date|keyword|text|double|long|unsigned_long|integer)" +"geo_point to_geopoint(field:geo_point|keyword|text)" +"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" +"integer to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"ip to_ip(field:ip|keyword|text)" +"long to_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" "keyword|text to_lower(str:keyword|text)" -"double 
to_radians(v:double|integer|long|unsigned_long)" -"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_radians(number:double|integer|long|unsigned_long)" +"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" "keyword|text to_upper(str:keyword|text)" -"version to_ver(v:keyword|text|version)" -"version to_version(v:keyword|text|version)" -"keyword|text trim(str:keyword|text)" +"version to_ver(field:keyword|text|version)" +"version to_version(field:keyword|text|version)" +"keyword|text trim(string:keyword|text)" ; @@ -211,8 +211,8 @@ META FUNCTIONS ; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -sin | "double sin(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | 
false | false -sinh | "double sinh(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false +sin | "double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false +sinh | "double sinh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index 784d97f820428..aee07e6e044c6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -26,7 +26,7 @@ public class Avg extends AggregateFunction implements SurrogateExpression { @FunctionInfo(returnType = "double", description = "The average of a numeric field.", isAggregation = true) - public Avg(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Avg(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index d8ec5300c061f..00c3fd30530cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -23,7 +23,7 @@ public class Max extends NumericAggregate { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The maximum value of a numeric field.", isAggregation = true) - public Max(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Max(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java index a6f4e30a62459..7612510f056f2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Median.java @@ -31,7 +31,7 @@ public class Median extends AggregateFunction implements SurrogateExpression { description = "The value that is greater than half of all values and less than half of all values.", isAggregation = true ) - public Median(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Median(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java index ecf1a47ee9eb3..db7979ef0359c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MedianAbsoluteDeviation.java @@ -27,7 +27,7 @@ public class MedianAbsoluteDeviation extends NumericAggregate { description = "The median absolute deviation, a measure of variability.", isAggregation = true ) - public MedianAbsoluteDeviation(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public MedianAbsoluteDeviation(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 8fdce6d959b98..6bbc349e2b523 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -23,7 +23,7 @@ public class Min extends NumericAggregate { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The minimum value of a numeric field.", isAggregation = true) - public Min(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Min(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java index 96385d534edcd..22592f067ba99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Percentile.java @@ -36,7 +36,7 @@ public class Percentile extends NumericAggregate { ) public Percentile( Source source, - @Param(name = "field", type = { "double", "integer", "long" }) Expression field, + @Param(name = "number", type = { "double", "integer", "long" }) Expression field, @Param(name = "percentile", type = { "double", "integer", "long" }) Expression percentile ) { super(source, field, List.of(percentile)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index d09762947a597..e6584262183fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -29,7 +29,7 @@ public class Sum extends NumericAggregate { @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) - public Sum(Source source, @Param(name = "field", type = { "double", "integer", "long" }) Expression field) { + public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 66756ffa14c60..f00e69ddaabe4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -66,7 +66,7 @@ public Case( Source source, @Param(name = "condition", type = { 
"boolean" }) Expression first, @Param( - name = "rest", + name = "trueValue", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 9e89746c4a9dd..1794258402aed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -43,7 +43,8 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { public Greatest( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) List rest + @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< + Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index fa0e393ed52b9..6b4208f7b3d85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -43,7 +43,8 @@ public class Least extends EsqlScalarFunction implements OptionalArgument { public Least( Source source, @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", 
"version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) List rest + @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< + Expression> rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 388ab970205ad..b5f33184d1395 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -44,7 +44,7 @@ public class ToBoolean extends AbstractConvertFunction { @FunctionInfo(returnType = "boolean", description = "Converts an input value to a boolean value.") public ToBoolean( Source source, - @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param(name = "field", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 3756c322abc4e..7a67681018727 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -33,7 +33,7 @@ public class ToCartesianPoint extends 
AbstractConvertFunction { ); @FunctionInfo(returnType = "cartesian_point", description = "Converts an input value to a point value.") - public ToCartesianPoint(Source source, @Param(name = "v", type = { "cartesian_point", "keyword", "text" }) Expression field) { + public ToCartesianPoint(Source source, @Param(name = "field", type = { "cartesian_point", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java index 51294b7834fc4..0e52e0870a354 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -37,7 +37,7 @@ public class ToCartesianShape extends AbstractConvertFunction { @FunctionInfo(returnType = "cartesian_shape", description = "Converts an input value to a shape value.") public ToCartesianShape( Source source, - @Param(name = "v", type = { "cartesian_point", "cartesian_shape", "keyword", "text" }) Expression field + @Param(name = "field", type = { "cartesian_point", "cartesian_shape", "keyword", "text" }) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 1ff8bc39e36f4..b8464ce8e000d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -43,7 +43,7 @@ public class ToDatetime 
extends AbstractConvertFunction { @FunctionInfo(returnType = "date", description = "Converts an input value to a date value.") public ToDatetime( Source source, - @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param(name = "field", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index c5e7b473f4e56..8332841518f01 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -41,7 +41,7 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ); @FunctionInfo(returnType = "double", description = "Converts a number in radians to degrees.") - public ToDegrees(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public ToDegrees(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 6a984abdad50f..d74ba553fc9d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -45,7 +45,10 @@ public class ToDouble 
extends AbstractConvertFunction { @FunctionInfo(returnType = "double", description = "Converts an input value to a double value.") public ToDouble( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index 16ea1235ccf59..acfaa7c3964c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -33,7 +33,7 @@ public class ToGeoPoint extends AbstractConvertFunction { ); @FunctionInfo(returnType = "geo_point", description = "Converts an input value to a geo_point value.") - public ToGeoPoint(Source source, @Param(name = "v", type = { "geo_point", "keyword", "text" }) Expression field) { + public ToGeoPoint(Source source, @Param(name = "field", type = { "geo_point", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java index 3a5d5f2012df6..e557735e6dfe1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -35,7 +35,7 @@ public class ToGeoShape 
extends AbstractConvertFunction { ); @FunctionInfo(returnType = "geo_shape", description = "Converts an input value to a geo_shape value.") - public ToGeoShape(Source source, @Param(name = "v", type = { "geo_point", "geo_shape", "keyword", "text" }) Expression field) { + public ToGeoShape(Source source, @Param(name = "field", type = { "geo_point", "geo_shape", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index fc6a5f5c69afa..e1d239ae93482 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -33,7 +33,7 @@ public class ToIP extends AbstractConvertFunction { ); @FunctionInfo(returnType = "ip", description = "Converts an input string to an IP value.") - public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { + public ToIP(Source source, @Param(name = "field", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 2288ddcc33a55..16b2d45c7ae26 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -47,7 +47,10 @@ public class ToInteger extends AbstractConvertFunction { @FunctionInfo(returnType = "integer", description = "Converts an input value to 
an integer value.") public ToInteger( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index ee7658b07b7f9..3139b2ee740b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -48,7 +48,10 @@ public class ToLong extends AbstractConvertFunction { @FunctionInfo(returnType = "long", description = "Converts an input value to a long value.") public ToLong( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index ac31cf3759ad9..0fe8795060c78 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -40,7 +40,7 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ); 
@FunctionInfo(returnType = "double", description = "Converts a number in degrees to radians.") - public ToRadians(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public ToRadians(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 688996dd1db00..4772949d90ef5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -64,7 +64,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper public ToString( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 656d99ee8ab80..31bbcd4bf302f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -49,7 +49,10 @@ public class ToUnsignedLong extends AbstractConvertFunction { @FunctionInfo(returnType = "unsigned_long", description = "Converts an input value to an unsigned long value.") public ToUnsignedLong( Source source, - @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) 
Expression field + @Param( + name = "field", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index e196a91e3bac2..ca3012871fced 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -33,7 +33,7 @@ public class ToVersion extends AbstractConvertFunction { ); @FunctionInfo(returnType = "version", description = "Converts an input string to a version value.") - public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { + public ToVersion(Source source, @Param(name = "field", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 4ee178852fcd4..4734f013cef35 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -43,14 +43,14 @@ public DateExtract( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial - @Param(name = "date_part", type = { "keyword" }, description = """ + @Param(name = "datePart", type = { "keyword" }, description = """ Part of 
the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.""") Expression chronoFieldExp, - @Param(name = "field", type = "date", description = "Date expression") Expression field, + @Param(name = "date", type = "date", description = "Date expression") Expression field, Configuration configuration ) { super(source, List.of(chronoFieldExp, field), configuration); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 60e1aabed3cdd..a889f5949b827 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -42,7 +42,7 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") public DateFormat( Source source, - @Param(optional = true, name = "format", type = { "keyword" }, description = "A valid date pattern") Expression format, + @Param(optional = true, name = "dateFormat", type = { "keyword" }, description = "A valid date pattern") Expression format, @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, Configuration configuration ) { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index d2f93abb27234..1df9406d5a1e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -23,7 +23,7 @@ public class Abs extends UnaryScalarFunction { @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Returns the absolute value.") - public Abs(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Abs(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index d2e0e8f025665..68ac9c556c2e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -24,7 +24,11 @@ public class Acos extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "The arccosine of an angle, expressed in radians.") public Acos( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "Number between -1 and 1") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Number between -1 and 1" + ) Expression n ) { super(source, n); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 38b70cea0350c..5a4ec6b9882fb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -24,7 +24,11 @@ public class Asin extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Inverse sine trigonometric function.") public Asin( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "Number between -1 and 1") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Number between -1 and 1" + ) Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 071379820922a..362b497b50f61 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -24,7 +24,7 @@ public class Atan extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Inverse tangent trigonometric function.") public Atan( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "A number") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "A number") Expression n ) { super(source, n); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index b69a536c2df84..33a0f8bb167c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -38,8 +38,8 @@ public class Atan2 extends EsqlScalarFunction { ) public Atan2( Source source, - @Param(name = "y", type = { "double", "integer", "long", "unsigned_long" }, description = "y coordinate") Expression y, - @Param(name = "x", type = { "double", "integer", "long", "unsigned_long" }, description = "x coordinate") Expression x + @Param(name = "y_coordinate", type = { "double", "integer", "long", "unsigned_long" }, description = "y coordinate") Expression y, + @Param(name = "x_coordinate", type = { "double", "integer", "long", "unsigned_long" }, description = "x coordinate") Expression x ) { super(source, List.of(y, x)); this.y = y; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 36021ba2bfe3d..42f8a67ceead1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -31,7 +31,7 @@ */ public class Ceil extends UnaryScalarFunction { @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number up to the nearest integer.") - public Ceil(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Ceil(Source source, @Param(name = "number", type = 
{ "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 0ecc0381636ef..e928f1ae2713e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -24,7 +24,7 @@ public class Cos extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") public Cos( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 78d982acc7bb6..55250a3ac720f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -25,7 +25,7 @@ public class Cosh extends AbstractTrigonometricFunction { public Cosh( Source source, @Param( - name = "n", + name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "The number who's hyperbolic cosine is to be returned" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index 9e5efaa611dc9..d4d71bb59ec77 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -34,7 +34,7 @@ public class Floor extends UnaryScalarFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number down to the nearest integer." ) - public Floor(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Floor(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java index 1994d0a1efc3d..f434437e12d5b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -32,11 +32,11 @@ public class Log extends EsqlScalarFunction implements OptionalArgument { private final Expression base, value; - @FunctionInfo(returnType = "double", description = "Returns the logarithm of a value to a base.") + @FunctionInfo(returnType = "double", description = "Returns the logarithm of a number to a base.") public Log( Source source, @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }, optional = true) Expression base, - @Param(name = "value", type = { "integer", "unsigned_long", "long", "double" }) Expression value + @Param(name = "number", type = { "integer", "unsigned_long", "long", "double" }) Expression value ) { 
super(source, value != null ? Arrays.asList(base, value) : Arrays.asList(base)); this.value = value != null ? value : base; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 1e8df6a07696f..1b41d2d407bd5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -28,7 +28,7 @@ public class Log10 extends UnaryScalarFunction { @FunctionInfo(returnType = "double", description = "Returns the log base 10.") - public Log10(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Log10(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index bdaa2cd4fc433..758b56093d40f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -47,7 +47,7 @@ public class Round extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo(returnType = "double", description = "Rounds a number to the closest number with the specified number of digits.") public Round( Source source, - @Param(name = "value", type = "double", description = "The numeric value to round") Expression field, + @Param(name = "number", type = "double", description = "The numeric value to round") Expression field, @Param( optional = 
true, name = "decimals", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index b3f204cfc09c8..d8b36a3d38856 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -25,7 +25,7 @@ public class Sin extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle") public Sin( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 25221043f297d..0c46002f56af6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -25,7 +25,7 @@ public class Sinh extends AbstractTrigonometricFunction { public Sinh( Source source, @Param( - name = "n", + name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic sine of" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 
35620ff926d41..542f667c61b95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -28,7 +28,7 @@ public class Sqrt extends UnaryScalarFunction { @FunctionInfo(returnType = "double", description = "Returns the square root of a number.") - public Sqrt(Source source, @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + public Sqrt(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 528a0ae0a0e71..002de2ddfc277 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -24,7 +24,7 @@ public class Tan extends AbstractTrigonometricFunction { @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") public Tan( Source source, - @Param(name = "n", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index c77bbaedf91b8..932677ef0b26d 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -25,7 +25,7 @@ public class Tanh extends AbstractTrigonometricFunction { public Tanh( Source source, @Param( - name = "n", + name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "The number to return the hyperbolic tangent of" ) Expression n diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 5e9a4e2a75878..0b4652b305741 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -35,7 +35,7 @@ public class MvAvg extends AbstractMultivalueFunction { returnType = "double", description = "Converts a multivalued field into a single valued field containing the average of all of the values." 
) - public MvAvg(Source source, @Param(name = "field", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvAvg(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index 5df0ac03206c4..d88c3fb1c0759 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -39,7 +39,7 @@ public class MvConcat extends BinaryScalarFunction implements EvaluatorMapper { ) public MvConcat( Source source, - @Param(name = "v", type = { "text", "keyword" }, description = "values to join") Expression field, + @Param(name = "string", type = { "text", "keyword" }, description = "values to join") Expression field, @Param(name = "delim", type = { "text", "keyword" }, description = "delimiter") Expression delim ) { super(source, field, delim); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 4fa89e66982e4..625e0a120372b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -35,7 +35,7 @@ public class MvCount extends AbstractMultivalueFunction { public MvCount( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index dc5fa0036f789..38b5ab187565a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -32,7 +32,10 @@ public class MvDedupe extends AbstractMultivalueFunction { ) public MvDedupe( Source source, - @Param(name = "v", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression field + @Param( + name = "field", + type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" } + ) Expression field ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 0f6bd847d68ed..1a6efd2924903 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -54,7 +54,7 @@ public class MvFirst extends AbstractMultivalueFunction { public MvFirst( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 2881854d17f6f..dff0108e465cd 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -54,7 +54,7 @@ public class MvLast extends AbstractMultivalueFunction { public MvLast( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index 0b08b99ca0687..b19888f94c6b9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -36,7 +36,7 @@ public class MvMax extends AbstractMultivalueFunction { public MvMax( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression v ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index 66a8ec13b4475..7c3cb2c1c2d1d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -39,7 +39,7 @@ public class MvMedian extends AbstractMultivalueFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Converts a multivalued field into a single valued field containing the median value." 
) - public MvMedian(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvMedian(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 91a48e539042f..45eb038616b09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -36,7 +36,7 @@ public class MvMin extends AbstractMultivalueFunction { public MvMin( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" } ) Expression field ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index b7868b33102a3..29ec231f08555 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -67,7 +67,7 @@ public class MvSlice extends ScalarFunction implements OptionalArgument, Evaluat public MvSlice( Source source, @Param( - name = "v", + name = "field", type = { "boolean", "cartesian_point", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 96d7ad905c8d0..ebe23d0d79e7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -34,7 +34,7 @@ public class MvSum extends AbstractMultivalueFunction { returnType = { "double", "integer", "long", "unsigned_long" }, description = "Converts a multivalued field into a single valued field containing the sum of all of the values." ) - public MvSum(Source source, @Param(name = "v", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { + public MvSum(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java index 6227efeced36e..88e006b1dfd8d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZip.java @@ -48,8 +48,8 @@ public class MvZip extends ScalarFunction implements OptionalArgument, Evaluator ) public MvZip( Source source, - @Param(name = "mvLeft", type = { "keyword", "text" }, description = "A multivalued field") Expression mvLeft, - @Param(name = "mvRight", type = { "keyword", "text" }, description = "A multivalued field") Expression mvRight, + @Param(name = "string1", type = { "keyword", "text" }, description = "A multivalued field") Expression mvLeft, + @Param(name = "string2", type = { "keyword", "text" }, description = "A multivalued field") 
Expression mvRight, @Param(name = "delim", type = { "keyword", "text" }, description = "delimiter", optional = true) Expression delim ) { super(source, delim == null ? Arrays.asList(mvLeft, mvRight, COMMA) : Arrays.asList(mvLeft, mvRight, delim)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index d6c299b460f37..98dc0c7e83d93 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -50,14 +50,15 @@ public class Coalesce extends EsqlScalarFunction implements OptionalArgument { public Coalesce( Source source, @Param( - name = "expression", + name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, description = "Expression to evaluate" ) Expression first, @Param( - name = "expressionX", + name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate" + description = "Other expression to evaluate", + optional = true ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index 9cf38d3f1d960..a9f44eece2411 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -41,8 +41,8 @@ public class Concat extends EsqlScalarFunction { @FunctionInfo(returnType = "keyword", description = "Concatenates two 
or more strings.") public Concat( Source source, - @Param(name = "first", type = { "keyword", "text" }) Expression first, - @Param(name = "rest", type = { "keyword", "text" }) List rest + @Param(name = "string1", type = { "keyword", "text" }) Expression first, + @Param(name = "string2", type = { "keyword", "text" }) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index 7582f0cd361f6..8185cbd7eb85e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -29,7 +29,7 @@ */ public class LTrim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes leading whitespaces from a string.") - public LTrim(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public LTrim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 97783bb6323d3..af3f25ee85408 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -44,12 +44,12 @@ public class Left extends EsqlScalarFunction { @FunctionInfo( returnType = "keyword", - description = "Returns the substring that extracts 'length' chars from 'str' starting from 
the left.", + description = "Returns the substring that extracts 'length' chars from 'string' starting from the left.", examples = { @Example(file = "string", tag = "left") } ) public Left( Source source, - @Param(name = "str", type = { "keyword", "text" }, description = "The string from which to return a substring.") Expression str, + @Param(name = "string", type = { "keyword", "text" }, description = "The string from which to return a substring.") Expression str, @Param(name = "length", type = { "integer" }, description = "The number of characters to return.") Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 8dc89a458e3be..759e6db436eda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -29,7 +29,7 @@ public class Length extends UnaryScalarFunction { @FunctionInfo(returnType = "integer", description = "Returns the character length of a string.") - public Length(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression field) { + public Length(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 857d2765bcfe4..ce93f88f71990 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -29,7 +29,7 @@ */ public class RTrim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes trailing whitespaces from a string.") - public RTrim(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public RTrim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index ed368068694e2..0ed12abcc0a83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -43,9 +43,9 @@ public class Replace extends EsqlScalarFunction { ) public Replace( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "regex", type = { "keyword", "text" }) Expression regex, - @Param(name = "newStr", type = { "keyword", "text" }) Expression newStr + @Param(name = "newString", type = { "keyword", "text" }) Expression newStr ) { super(source, Arrays.asList(str, regex, newStr)); this.str = str; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 0e3e78e8188ab..13087dbd32916 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -47,7 +47,7 @@ public class Right extends EsqlScalarFunction { ) public Right( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "length", type = { "integer" }) Expression length ) { super(source, Arrays.asList(str, length)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 66d50aa4df061..4ecc7fa1a96a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -37,7 +37,7 @@ public class Split extends BinaryScalarFunction implements EvaluatorMapper { @FunctionInfo(returnType = "keyword", description = "Split a single valued string into multiple strings.") public Split( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "delim", type = { "keyword", "text" }) Expression delim ) { super(source, str, delim); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 36d24fef03006..88187b8ba65bc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -41,7 +41,7 @@ public class Substring extends 
EsqlScalarFunction implements OptionalArgument { ) public Substring( Source source, - @Param(name = "str", type = { "keyword", "text" }) Expression str, + @Param(name = "string", type = { "keyword", "text" }) Expression str, @Param(name = "start", type = { "integer" }) Expression start, @Param(optional = true, name = "length", type = { "integer" }) Expression length ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index 1efb966bf9ce2..6d71eb9fd4baf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -29,7 +29,7 @@ */ public final class Trim extends UnaryScalarFunction { @FunctionInfo(returnType = { "keyword", "text" }, description = "Removes leading and trailing whitespaces from a string.") - public Trim(Source source, @Param(name = "str", type = { "keyword", "text" }) Expression str) { + public Trim(Source source, @Param(name = "string", type = { "keyword", "text" }) Expression str) { super(source, str); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java index 55508a7704346..34b6fd1a31b13 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java @@ -82,11 +82,16 @@ private Object collect(EsqlFunctionRegistry.FunctionDescription signature, Funct return result; } + List args = signature.args(); List result = signature.args().stream().map(x).collect(Collectors.toList()); - if (result.isEmpty() 
== false && result.get(0) instanceof String[]) { - List newResult = new ArrayList<>(); - for (Object item : result) { - newResult.add(withPipes((String[]) item)); + boolean withPipes = result.get(0) instanceof String[]; + if (result.isEmpty() == false) { + List newResult = new ArrayList<>(); + for (int i = 0; i < result.size(); i++) { + if (signature.variadic() && args.get(i).optional()) { + continue; + } + newResult.add(withPipes ? withPipes((String[]) result.get(i)) : result.get(i)); } return newResult; } From 751742422a378baa715a75c4f70823685d624045 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 14:22:30 +0000 Subject: [PATCH 066/214] AwaitsFix for #106554 --- .../java/org/elasticsearch/server/cli/JvmErgonomicsTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java index c94aa0d9f68af..0485697eb128c 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmErgonomicsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.server.cli; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressFileSystems; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.settings.Settings; @@ -40,6 +41,7 @@ @WithoutSecurityManager @SuppressFileSystems("*") +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106554") public class JvmErgonomicsTests extends ESTestCase { public void testExtractValidHeapSizeUsingXmx() throws Exception { From a368b485c2726d39ece5497396ca697b5a7d0413 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 20 Mar 2024 14:43:22 +0000 Subject: [PATCH 067/214] Release `TranslogSnapshot` 
buffer after iteration (#106398) Closes #106390 --- docs/changelog/106398.yaml | 6 ++++++ .../org/elasticsearch/index/translog/TranslogSnapshot.java | 1 + 2 files changed, 7 insertions(+) create mode 100644 docs/changelog/106398.yaml diff --git a/docs/changelog/106398.yaml b/docs/changelog/106398.yaml new file mode 100644 index 0000000000000..cffc5ceeb214d --- /dev/null +++ b/docs/changelog/106398.yaml @@ -0,0 +1,6 @@ +pr: 106398 +summary: Release `TranslogSnapshot` buffer after iteration +area: Engine +type: bug +issues: + - 106390 diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index fc4bec969a9be..c21a2b440ba8c 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -62,6 +62,7 @@ public Translog.Operation next() throws IOException { } skippedOperations++; } + reuse = null; // release buffer, it may be large and is no longer needed return null; } From b24e0d005359aabd98979cf9dfccc01fc760ba24 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 20 Mar 2024 16:36:00 +0100 Subject: [PATCH 068/214] Fix 60_enrich/Enrich on keyword (fixes 106507) (#106513) * Mute failing test * Remove gradle mute, and change comment to indicate this is a fix It appears the difference that caused the problem is that the query now returns a field that used to be a keyword, but is now a text field, provided by the enrich lookup. So even though the enrich is still based on a keyword, this query now accesses code paths it did not before, and those have issues before 8.13.0 related to reading text from source. 
--- x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle | 4 ---- .../resources/rest-api-spec/test/esql/60_enrich.yml | 5 +++++ 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index 283d449937739..09397710bb856 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -39,10 +39,6 @@ BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseN def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) - systemProperty("tests.rest.blacklist", [ - // https://github.com/elastic/elasticsearch/issues/106507 - "esql/60_enrich/Enrich on keyword" - ].join(',')) testClassesDirs = sourceSets.yamlRestTest.output.classesDirs classpath = sourceSets.yamlRestTest.runtimeClasspath } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml index 2e181af93f77c..b294629dda073 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_enrich.yml @@ -92,6 +92,11 @@ teardown: --- "Enrich on keyword": + - skip: + version: " - 8.12.99" + reason: "Enriching with text field in the enrich fields list involves reading text from source, not supported before 8.13.0" + features: allowed_warnings_regex + - do: allowed_warnings_regex: - "No limit defined, adding default limit of \\[.*\\]" From 19cf318119d64fed9d3f34dc9d0f094dd688e7db Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 20 Mar 2024 16:38:38 +0100 Subject: [PATCH 069/214] Use LogDocMergePolicy in GeoPointScriptFieldDistanceFeatureQueryTests#testMatches (#106557) --- 
.../GeoPointScriptFieldDistanceFeatureQueryTests.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java index b6530e3c1c6bd..e6c34bb89fd8b 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQueryTests.java @@ -10,13 +10,17 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.geo.GeoTestUtil; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -79,9 +83,13 @@ protected GeoPointScriptFieldDistanceFeatureQuery mutate(GeoPointScriptFieldDist @Override public void testMatches() throws IOException { - try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(random(), new MockAnalyzer(random())); + // Use LogDocMergePolicy to avoid randomization issues with the doc retrieval order. 
+ config.setMergePolicy(new LogDocMergePolicy()); + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory, config)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [34, 6]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [-3.56, -45.98]}")))); + try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields()); From 58c9233e79840f6fc95cf30e29c9c142db1c3153 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 20 Mar 2024 11:45:54 -0400 Subject: [PATCH 070/214] [ES|QL] Centralize Datetime String Conversion (#106388) * centralize datetime string conversion --- .../src/main/resources/date.csv-spec | 4 +- .../xpack/esql/action/PositionToXContent.java | 4 +- .../xpack/esql/action/ResponseValueUtils.java | 7 ++- .../xpack/esql/analysis/Analyzer.java | 4 +- .../function/scalar/convert/ToDatetime.java | 4 +- .../function/scalar/convert/ToIP.java | 4 +- .../function/scalar/convert/ToString.java | 4 +- .../function/scalar/date/DateExtract.java | 21 +++---- .../function/scalar/date/DateFormat.java | 11 ++-- .../function/scalar/date/DateParse.java | 10 ++-- .../function/scalar/math/AutoBucket.java | 8 +-- .../planner/EsqlExpressionTranslators.java | 6 +- .../esql/type/EsqlDataTypeConverter.java | 57 ++++++++++++++++++- .../scalar/convert/ToDatetimeTests.java | 37 ++++++++---- 14 files changed, 125 insertions(+), 56 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 4bb43146aaf0d..09128eca5c18e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -167,14 +167,14 @@ 
birth_date:date |bd:date 1964-06-02T00:00:00.000Z|1964-06-02T00:00:00.000Z ; -convertFromString +convertFromString#[skip:-8.13.99, reason: default date formatter is changed in 8.14] // tag::to_datetime-str[] ROW string = ["1953-09-02T00:00:00.000Z", "1964-06-02T00:00:00.000Z", "1964-06-02 00:00:00"] | EVAL datetime = TO_DATETIME(string) // end::to_datetime-str[] ; warning:Line 2:19: evaluation of [TO_DATETIME(string)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:19: java.lang.IllegalArgumentException: failed to parse date field [1964-06-02 00:00:00] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z'] +warning:Line 2:19: java.lang.IllegalArgumentException: failed to parse date field [1964-06-02 00:00:00] with format [strict_date_optional_time] // tag::to_datetime-str-result[] string:keyword |datetime:date diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 176b89f80c910..8770d313ac2fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -28,7 +28,7 @@ import java.io.IOException; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; @@ -117,7 +117,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { long longVal = ((LongBlock) 
block).getLong(valueIndex); - return builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); + return builder.value(dateTimeToString(longVal)); } }; case "geo_point", "geo_shape" -> new PositionToXContent(block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index f407d5c4b5e6e..8691f2b142d87 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -39,7 +39,8 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; @@ -131,7 +132,7 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "date" -> { long longVal = ((LongBlock) block).getLong(offset); - yield UTC_DATE_TIME_FORMATTER.formatMillis(longVal); + yield dateTimeToString(longVal); } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); @@ -177,7 +178,7 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li ); case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(parseIP(value.toString())); case "date" -> { - long longVal = UTC_DATE_TIME_FORMATTER.parseMillis(value.toString()); + long longVal = dateTimeToLong(value.toString()); ((LongBlock.Builder) 
builder).appendLong(longVal); } case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index ce5a43c617b78..14344502f165a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.VerificationException; @@ -83,6 +82,7 @@ import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; @@ -774,7 +774,7 @@ private static Expression stringToDate(Expression stringExpression) { Long millis = null; // TODO: better control over this string format - do we want this to be flexible or always redirect folks to use date parsing try { - millis = str == null ? null : DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(str); + millis = str == null ? 
null : dateTimeToLong(str); } catch (Exception ex) { // in case of exception, millis will be null which will trigger an error } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index b8464ce8e000d..a1dae4a06b0bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -11,7 +11,6 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,6 +19,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -70,6 +70,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static long fromKeyword(BytesRef in) { - return DateParse.process(in, DateParse.DEFAULT_FORMATTER); + return dateTimeToLong(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index e1d239ae93482..856f903e278c5 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_IP; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; public class ToIP extends AbstractConvertFunction { @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef asString) { - return parseIP(asString.utf8ToString()); + return (BytesRef) STRING_TO_IP.convert(asString); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 4772949d90ef5..33663534bf6cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; @@ -36,7 +37,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static 
org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; @@ -117,7 +117,7 @@ static BytesRef fromIP(BytesRef ip) { @ConvertEvaluator(extraName = "FromDatetime") static BytesRef fromDatetime(long datetime) { - return new BytesRef(UTC_DATE_TIME_FORMATTER.formatMillis(datetime)); + return new BytesRef(dateTimeToString(datetime)); } @ConvertEvaluator(extraName = "FromDouble") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 4734f013cef35..4f31f73963569 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -24,13 +23,13 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.time.Instant; import java.time.ZoneId; import java.time.temporal.ChronoField; import java.util.List; -import java.util.Locale; import java.util.function.Function; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; @@ -76,13 +75,12 @@ private ChronoField chronoField() { // TODO: move the slimmed down code here to toEvaluator? if (chronoField == null) { Expression field = children().get(0); - if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { - try { - BytesRef br = BytesRefs.toBytesRef(field.fold()); - chronoField = ChronoField.valueOf(br.utf8ToString().toUpperCase(Locale.ROOT)); - } catch (Exception e) { - return null; + try { + if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { + chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); } + } catch (Exception e) { + return null; } } return chronoField; @@ -90,13 +88,12 @@ private ChronoField chronoField() { @Evaluator(warnExceptions = { IllegalArgumentException.class }) static long process(long value, BytesRef chronoField, @Fixed ZoneId zone) { - ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); - return Instant.ofEpochMilli(value).atZone(zone).getLong(chrono); + return chronoToLong(value, chronoField, zone); } @Evaluator(extraName = "Constant") static long process(long value, @Fixed ChronoField chronoField, @Fixed ZoneId zone) { - return Instant.ofEpochMilli(value).atZone(zone).getLong(chronoField); + return chronoToLong(value, chronoField, zone); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index a889f5949b827..85e8a0f3aec47 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -28,11 +28,12 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { @@ -83,19 +84,19 @@ public boolean foldable() { @Evaluator(extraName = "Constant") static BytesRef process(long val, @Fixed DateFormatter formatter) { - return new BytesRef(formatter.formatMillis(val)); + return new BytesRef(dateTimeToString(val, formatter)); } @Evaluator static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { - return process(val, toFormatter(formatter, locale)); + return new BytesRef(dateTimeToString(val, toFormatter(formatter, locale))); } @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_DATE_TIME_FORMATTER, dvrCtx); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type 
for format [" + format.dataType() + "]"); @@ -115,7 +116,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.DATE_PERIOD)), - STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.TIME_DURATION)); + STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.TIME_DURATION)), + STRING_TO_CHRONO_FIELD(EsqlDataTypeConverter::stringToChrono), + STRING_TO_IP(x -> EsqlDataTypeConverter.stringToIP((BytesRef) x)); private static final String NAME = "esql-converter"; private final Function converter; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 808249a01969a..93a0d0b5190f5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -25,6 +24,7 @@ import java.util.function.Supplier; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; public class ToDatetimeTests extends AbstractFunctionTestCase { public ToDatetimeTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -103,7 +103,7 @@ public static Iterable 
parameters() { "Line -1:-1: java.lang.IllegalArgumentException: " + (bytesRef.utf8ToString().isEmpty() ? "cannot parse empty datetime" - : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']")) + : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [strict_date_optional_time]")) ) ); TestCaseSupplier.unary( @@ -113,12 +113,12 @@ public static Iterable parameters() { new TestCaseSupplier.TypedDataSupplier( "", // millis past "0001-01-01T00:00:00.000Z" to match the default formatter - () -> new BytesRef(randomDateString(-62135596800000L, Long.MAX_VALUE)), + () -> new BytesRef(randomDateString(-62135596800000L, 253402300799999L)), DataTypes.KEYWORD ) ), DataTypes.DATETIME, - bytesRef -> DateParse.DEFAULT_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), + bytesRef -> DEFAULT_DATE_TIME_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), emptyList() ); TestCaseSupplier.unary( @@ -138,7 +138,27 @@ public static Iterable parameters() { "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + ((BytesRef) bytesRef).utf8ToString() - + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']" + + "] with format [strict_date_optional_time]" + ) + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis before "0001-01-01T00:00:00.000Z" + () -> new BytesRef(randomDateString(253402300800000L, Long.MAX_VALUE)), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + + ((BytesRef) bytesRef).utf8ToString() + + "] with format [strict_date_optional_time]" ) ); @@ -146,12 +166,7 @@ public static Iterable parameters() { } private static String randomDateString(long from, long to) { - String result = Instant.ofEpochMilli(randomLongBetween(from, to)).toString(); - if (result.matches(".*:..Z")) { - // it's a zero millisecond date string, Instant.toString() will strip the milliseconds (and the parsing will fail) - return result.replace("Z", ".000Z"); - } - return result; + return Instant.ofEpochMilli(randomLongBetween(from, to)).toString(); } @Override From 43de7a435d99cc79132ec97542230898172ecf8c Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 20 Mar 2024 16:57:17 +0100 Subject: [PATCH 071/214] Introduce a generic HashMap backed by big arrays (#106418) this commit introduces a PagedHashMap which is equivalent of a Java HashMap but backed by big arrays. --- .../common/util/ObjectObjectPagedHashMap.java | 199 ++++++++++++++++++ .../bucket/prefix/InternalIpPrefix.java | 41 ++-- .../terms/InternalSignificantTerms.java | 43 ++-- .../util/ObjectObjectPagedHashMapTests.java | 92 ++++++++ .../common/util/MockBigArrays.java | 9 +- 5 files changed, 351 insertions(+), 33 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java create mode 100644 server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java diff --git a/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java new file mode 100644 index 0000000000000..2b444741dd363 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/ObjectObjectPagedHashMap.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.core.Releasables; + +import java.util.Iterator; +import java.util.NoSuchElementException; + +/** + * A hash table from objects to objects. This implementation resolves collisions + * with open addressing and linear probing and does not support null values. + * This class is not thread-safe. + * + * Note that this class does not track either the actual keys or values. It is responsibility of + * the caller to release those objects if necessary. + */ +public final class ObjectObjectPagedHashMap extends AbstractPagedHashMap implements Iterable> { + + private ObjectArray keys; + private ObjectArray values; + + public ObjectObjectPagedHashMap(long capacity, BigArrays bigArrays) { + this(capacity, DEFAULT_MAX_LOAD_FACTOR, bigArrays); + } + + public ObjectObjectPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) { + super(capacity, maxLoadFactor, bigArrays); + keys = bigArrays.newObjectArray(capacity()); + boolean success = false; + try { + values = bigArrays.newObjectArray(capacity()); + success = true; + } finally { + if (false == success) { + close(); + } + } + } + + /** + * Get the value that is associated with key or null if key + * was not present in the hash table. 
+ */ + public V get(K key) { + final long slot = slot(key.hashCode(), mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V value = values.get(index); + if (value == null) { + return null; + } else if (keys.get(index).equals(key)) { + return value; + } + } + } + + /** + * Put this new (key, value) pair into this hash table and return the value + * that was previously associated with key or null in case of + * an insertion. + */ + public V put(K key, V value) { + if (size >= maxSize) { + assert size == maxSize; + grow(); + } + assert size < maxSize; + return set(key, key.hashCode(), value); + } + + /** + * Remove the entry which has this key in the hash table and return the + * associated value or null if there was no entry associated with this key. + */ + public V remove(K key) { + final long slot = slot(key.hashCode(), mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V previous = values.set(index, null); + if (previous == null) { + return null; + } else if (keys.get(index).equals(key)) { + --size; + for (long j = nextSlot(index, mask); used(j); j = nextSlot(j, mask)) { + removeAndAdd(j); + } + return previous; + } else { + // repair and continue + values.set(index, previous); + } + } + } + + private V set(K key, int code, V value) { + assert key.hashCode() == code; + assert value != null; + assert size < maxSize; + final long slot = slot(code, mask); + for (long index = slot;; index = nextSlot(index, mask)) { + final V previous = values.set(index, value); + if (previous == null) { + // slot was free + keys.set(index, key); + ++size; + return null; + } else if (key.equals(keys.get(index))) { + // we just updated the value + return previous; + } else { + // not the right key, repair and continue + values.set(index, previous); + } + } + } + + @Override + public Iterator> iterator() { + return new Iterator<>() { + + boolean cached; + final Cursor cursor; + { + cursor = new Cursor<>(); + cursor.index = -1; + cached = 
false; + } + + @Override + public boolean hasNext() { + if (cached == false) { + while (true) { + ++cursor.index; + if (cursor.index >= capacity()) { + break; + } else if (used(cursor.index)) { + cursor.key = keys.get(cursor.index); + cursor.value = values.get(cursor.index); + break; + } + } + cached = true; + } + return cursor.index < capacity(); + } + + @Override + public Cursor next() { + if (hasNext() == false) { + throw new NoSuchElementException(); + } + cached = false; + return cursor; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + }; + } + + @Override + public void close() { + Releasables.close(keys, values); + } + + @Override + protected void resize(long capacity) { + keys = bigArrays.resize(keys, capacity); + values = bigArrays.resize(values, capacity); + } + + @Override + protected boolean used(long bucket) { + return values.get(bucket) != null; + } + + @Override + protected void removeAndAdd(long index) { + final K key = keys.get(index); + final V value = values.set(index, null); + --size; + final V removed = set(key, key.hashCode(), value); + assert removed == null; + } + + public static final class Cursor { + public long index; + public K key; + public V value; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index a3f53b494acfa..50b92a2e77841 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import 
org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -26,7 +27,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -225,30 +225,40 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final Map buckets = new HashMap<>(); + final ObjectObjectPagedHashMap buckets = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation; for (Bucket bucket : ipPrefix.getBuckets()) { - ReducerAndProto reducerAndProto = buckets.computeIfAbsent( - bucket.key, - k -> new ReducerAndProto(new MultiBucketAggregatorsReducer(reduceContext, size), bucket) - ); + ReducerAndProto reducerAndProto = buckets.get(bucket.key); + if (reducerAndProto == null) { + reducerAndProto = new ReducerAndProto(new MultiBucketAggregatorsReducer(reduceContext, size), bucket); + boolean success = false; + try { + buckets.put(bucket.key, reducerAndProto); + success = true; + } finally { + if (success == false) { + Releasables.close(reducerAndProto.reducer); + } + } + } reducerAndProto.reducer.accept(bucket); } } @Override public InternalAggregation get() { - final List reducedBuckets = new ArrayList<>(buckets.size()); - for (ReducerAndProto reducerAndProto : buckets.values()) { - if (false == reduceContext.isFinalReduce() || reducerAndProto.reducer.getDocCount() >= minDocCount) { - reducedBuckets.add( - createBucket(reducerAndProto.proto, reducerAndProto.reducer.get(), reducerAndProto.reducer.getDocCount()) - ); + final List reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size())); + 
buckets.iterator().forEachRemaining(entry -> { + if (false == reduceContext.isFinalReduce() || entry.value.reducer.getDocCount() >= minDocCount) { + reducedBuckets.add(createBucket(entry.value.proto, entry.value.reducer.get(), entry.value.reducer.getDocCount())); } - } + }); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); reducedBuckets.sort(Comparator.comparing(a -> a.key)); return new InternalIpPrefix(getName(), format, keyed, minDocCount, reducedBuckets, metadata); @@ -256,9 +266,8 @@ public InternalAggregation get() { @Override public void close() { - for (ReducerAndProto reducerAndProto : buckets.values()) { - Releasables.close(reducerAndProto.reducer); - } + buckets.iterator().forEachRemaining(entry -> Releasables.close(entry.value.reducer)); + Releasables.close(buckets); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 0d06e455c57fa..440e42f845ce2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -24,7 +25,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -202,7 +202,10 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont return new AggregatorReducer() { long globalSubsetSize = 0; long globalSupersetSize 
= 0; - final Map> buckets = new HashMap<>(); + final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { @@ -213,10 +216,19 @@ public void accept(InternalAggregation aggregation) { globalSubsetSize += terms.getSubsetSize(); globalSupersetSize += terms.getSupersetSize(); for (B bucket : terms.getBuckets()) { - final ReducerAndProto reducerAndProto = buckets.computeIfAbsent( - bucket.getKeyAsString(), - k -> new ReducerAndProto<>(new MultiBucketAggregatorsReducer(reduceContext, size), bucket) - ); + ReducerAndProto reducerAndProto = buckets.get(bucket.getKeyAsString()); + if (reducerAndProto == null) { + reducerAndProto = new ReducerAndProto<>(new MultiBucketAggregatorsReducer(reduceContext, size), bucket); + boolean success = false; + try { + buckets.put(bucket.getKeyAsString(), reducerAndProto); + success = true; + } finally { + if (success == false) { + Releasables.close(reducerAndProto.reducer); + } + } + } reducerAndProto.reducer.accept(bucket); reducerAndProto.subsetDf[0] += bucket.subsetDf; reducerAndProto.supersetDf[0] += bucket.supersetDf; @@ -226,16 +238,16 @@ public void accept(InternalAggregation aggregation) { @Override public InternalAggregation get() { final SignificanceHeuristic heuristic = getSignificanceHeuristic().rewrite(reduceContext); - final int size = reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size()); + final int size = (int) (reduceContext.isFinalReduce() == false ? 
buckets.size() : Math.min(requiredSize, buckets.size())); try (BucketSignificancePriorityQueue ordered = new BucketSignificancePriorityQueue<>(size, reduceContext.bigArrays())) { - for (ReducerAndProto reducerAndProto : buckets.values()) { + buckets.iterator().forEachRemaining(entry -> { final B b = createBucket( - reducerAndProto.subsetDf[0], + entry.value.subsetDf[0], globalSubsetSize, - reducerAndProto.supersetDf[0], + entry.value.supersetDf[0], globalSupersetSize, - reducerAndProto.reducer.get(), - reducerAndProto.proto + entry.value.reducer.get(), + entry.value.proto ); b.updateScore(heuristic); if (((b.score > 0) && (b.subsetDf >= minDocCount)) || reduceContext.isFinalReduce() == false) { @@ -248,7 +260,7 @@ public InternalAggregation get() { } else { reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); } - } + }); final B[] list = createBucketsArray((int) ordered.size()); for (int i = (int) ordered.size() - 1; i >= 0; i--) { list[i] = ordered.pop(); @@ -259,9 +271,8 @@ public InternalAggregation get() { @Override public void close() { - for (ReducerAndProto reducerAndProto : buckets.values()) { - Releasables.close(reducerAndProto.reducer); - } + buckets.iterator().forEachRemaining(entry -> Releasables.close(entry.value.reducer)); + Releasables.close(buckets); } }; } diff --git a/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java b/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java new file mode 100644 index 0000000000000..198adda7c33be --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/util/ObjectObjectPagedHashMapTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.util; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class ObjectObjectPagedHashMapTests extends ESTestCase { + + private BigArrays mockBigArrays(CircuitBreakerService service) { + return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), service, true); + } + + public void testDuel() { + // first with cranky + try { + doTestDuel(mockBigArrays(new CrankyCircuitBreakerService())); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo("cranky breaker")); + } + // then to the end + doTestDuel(mockBigArrays(new NoneCircuitBreakerService())); + } + + private void doTestDuel(BigArrays bigArrays) { + final Map map1 = new HashMap<>(); + try ( + ObjectObjectPagedHashMap map2 = new ObjectObjectPagedHashMap<>( + randomInt(42), + 0.6f + randomFloat() * 0.39f, + bigArrays + ) + ) { + final int maxKey = randomIntBetween(1, 10000); + BytesRef[] bytesRefs = new BytesRef[maxKey]; + for (int i = 0; i < maxKey; i++) { + bytesRefs[i] = randomBytesRef(); + } + final int iters = scaledRandomIntBetween(10000, 100000); + for (int i = 0; i < iters; ++i) { + final boolean put = randomBoolean(); + final int iters2 = randomIntBetween(1, 100); + for (int j = 0; j < iters2; ++j) { + final 
BytesRef key = bytesRefs[random().nextInt(maxKey)]; + if (put) { + final Object value = new Object(); + assertSame(map1.put(key, value), map2.put(key, value)); + } else { + assertSame(map1.remove(key), map2.remove(key)); + } + assertEquals(map1.size(), map2.size()); + } + } + for (int i = 0; i < maxKey; i++) { + assertSame(map1.get(bytesRefs[i]), map2.get(bytesRefs[i])); + } + final Map copy = new HashMap<>(); + for (ObjectObjectPagedHashMap.Cursor cursor : map2) { + copy.put(cursor.key, cursor.value); + } + assertEquals(map1, copy); + } + } + + private BytesRef randomBytesRef() { + byte[] bytes = new byte[randomIntBetween(2, 20)]; + random().nextBytes(bytes); + return new BytesRef(bytes); + } + + public void testAllocation() { + MockBigArrays.assertFitsIn(ByteSizeValue.ofBytes(256), bigArrays -> new ObjectObjectPagedHashMap<>(1, bigArrays)); + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 481ca207a71cf..52614dee8d04a 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -144,11 +144,18 @@ public MockBigArrays(PageCacheRecycler recycler, ByteSizeValue limit) { when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new LimitedBreaker(CircuitBreaker.REQUEST, limit)); } + /** + * Create {@linkplain BigArrays} with a provided breaker service. The breaker is not enable by default. + */ public MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService) { this(recycler, breakerService, false); } - private MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService, boolean checkBreaker) { + /** + * Create {@linkplain BigArrays} with a provided breaker service. The breaker can be enabled with the + * {@code checkBreaker} flag. 
+ */ + public MockBigArrays(PageCacheRecycler recycler, CircuitBreakerService breakerService, boolean checkBreaker) { super(recycler, breakerService, CircuitBreaker.REQUEST, checkBreaker); this.recycler = recycler; this.breakerService = breakerService; From 2523ed90b9d547c4de4ecca4c3ff8da11f29a12f Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 20 Mar 2024 09:04:52 -0700 Subject: [PATCH 072/214] Validate that test cluster BWC nodes use the default distribution (#106559) We have instances where BWC tests configure old ES version nodes with the integTest distribution. This isn't a valid configuration, and while we in reality resolve the default distribution artifact, we have other configuration logic that behaves differently based on whether the integTest distro was _requested_. Specifically, what to set ES_JAVA_HOME to. This bug resulted in us attempting to run old nodes using the current bundled JDK version, which may be incompatible with that older version of Elasticsearch. Closes #104858 --- .../test/cluster/local/LocalClusterSpec.java | 9 +++++++++ .../remotecluster/RemoteClusterSecurityBwcRestIT.java | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java index de0d541c8535f..062bb3bd3fa5a 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java @@ -71,6 +71,15 @@ void validate() { if (nodeNames.isEmpty() == false) { throw new IllegalArgumentException("Cluster cannot contain nodes with duplicates names: " + nodeNames); } + + // Ensure we do not configure older version nodes with the integTest distribution + if (nodes.stream().anyMatch(n -> n.getVersion() != Version.CURRENT && n.getDistributionType() == 
DistributionType.INTEG_TEST)) { + throw new IllegalArgumentException( + "Error configuring test cluster '" + + name + + "'. When configuring a node for a prior Elasticsearch version, the default distribution type must be used." + ); + } } public static class LocalNodeSpec { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index 7c26b8e386cc5..2db510ffc7f41 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -49,7 +49,7 @@ public class RemoteClusterSecurityBwcRestIT extends AbstractRemoteClusterSecurit static { fulfillingCluster = ElasticsearchCluster.local() .version(OLD_CLUSTER_VERSION) - .distribution(DistributionType.INTEG_TEST) + .distribution(DistributionType.DEFAULT) .name("fulfilling-cluster") .apply(commonClusterConfig) .setting("xpack.ml.enabled", "false") From 52b311d7ee685a669e1f8420ea2c6e6da4c477b1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 20 Mar 2024 19:19:24 +0100 Subject: [PATCH 073/214] Rename index names in 25_id_generation.yml file (#106558) Rename index names in yaml test to more easily find related log lines in es server log. Many yaml tests are being executed in the same qa cluster, this should help finding relevant logs for failures in this yaml file. 
Relates to #106550 --- .../test/tsdb/25_id_generation.yml | 144 +++++++----------- 1 file changed, 52 insertions(+), 92 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 6ef03ba8ebcc4..04fa2faca209f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -1,12 +1,12 @@ --- setup: - skip: - version: "- 8.12.99" - reason: _tsid hashing introduced in 8.13 + version: "- 8.13.99" + reason: _tsid hashing introduced in 8.13 and tsid routing changed in 8.14 - do: indices.create: - index: test + index: id_generation_test body: settings: index: @@ -44,7 +44,7 @@ setup: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' @@ -65,14 +65,10 @@ setup: --- generates a consistent id: - - skip: - version: " - 8.1.99" - reason: ID generation added in 8.2 - - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:52:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' @@ -81,15 +77,35 @@ generates a consistent id: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:52:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "network": {"tx": 2001818691, "rx": 802133794}}}}' - match: {items.0.index._id: 
cZZNs7B9sSWsyrL5AAABeRnS7fM} + - do: + get: + index: id_generation_test + id: cn4excfoxSs_KdA5AAABeRnRFAY + - match: {_index: id_generation_test} + - match: {_id: cn4excfoxSs_KdA5AAABeRnRFAY} + - match: + _source: + "@timestamp": "2021-04-28T18:50:03.142Z" + metricset: pod + k8s: + pod: + name: dog + uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 + ip: 10.10.55.3 + network: + tx: 1434521831 + rx: 530575198 + + - do: search: - index: test + index: id_generation_test body: query: match_all: {} @@ -135,13 +151,9 @@ generates a consistent id: --- index a new document on top of an old one: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -157,7 +169,7 @@ index a new document on top of an old one: - do: index: refresh: true - index: test + index: id_generation_test op_type: index body: "@timestamp": "2021-04-28T18:51:03.142Z" @@ -174,7 +186,7 @@ index a new document on top of an old one: - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -189,13 +201,9 @@ index a new document on top of an old one: --- index a new document on top of an old one over bulk: - - skip: - version: " - 8.1.99" - reason: indexing on top of another document support added in 8.2 - - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -211,7 +219,7 @@ index a new document on top of an old one over bulk: - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 111434595272, "rx": 430605511}}}}' @@ -219,7 +227,7 @@ index a new document on top of an old one over bulk: - do: search: - index: test + index: id_generation_test body: size: 0 aggs: @@ -234,15 +242,11 @@ index a new document on top of an old one over bulk: --- create operation 
on top of old document fails: - - skip: - version: " - 8.1.99" - reason: id generation changed in 8.2 - - do: catch: "/\\[cn4excfoxSs_KdA5AAABeRnR_mY\\]\\[.*@2021-04-28T18:51:03.142Z\\]: version conflict, document already exists \\(current version \\[1\\]\\)/" index: refresh: true - index: test + index: id_generation_test body: "@timestamp": "2021-04-28T18:51:03.142Z" metricset: pod @@ -257,14 +261,10 @@ create operation on top of old document fails: --- create operation on top of old document fails over bulk: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: bulk: refresh: true - index: test + index: id_generation_test body: - '{"create": {}}' - '{"@timestamp": "2021-04-28T18:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.3", "network": {"tx": 111434595272, "rx": 430605511}}}}' @@ -272,13 +272,9 @@ create operation on top of old document fails over bulk: --- ids query: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: search: - index: test + index: id_generation_test body: fields: - field: k8s.pod.network.tx @@ -294,15 +290,11 @@ ids query: --- get: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: get: - index: test + index: id_generation_test id: cZZNs7B9sSWsyrL5AAABeRnSA5M - - match: {_index: test} + - match: {_index: id_generation_test} - match: {_id: cZZNs7B9sSWsyrL5AAABeRnSA5M} - match: _source: @@ -319,89 +311,65 @@ get: --- get not found: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: missing get: - index: test + index: id_generation_test id: not found --- get with routing: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: bad_request get: - index: test + index: id_generation_test id: cZZNs-xII2fZweptAAABeRnSA5M routing: routing --- delete: - - skip: - version: " - 8.12.99" - reason: _tsid hashing 
introduced in 8.13 - - do: delete: - index: test + index: id_generation_test id: cn4excfoxSs_KdA5AAABeRnR_mY - match: {result: deleted} --- delete not found: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: missing delete: - index: test + index: id_generation_test id: not found --- delete with routing: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - - do: catch: bad_request delete: - index: test + index: id_generation_test id: not found routing: routing --- delete over _bulk: - - skip: - version: " - 8.1.99" - reason: ids generation changed in 8.2 - # mget call added to investigate test failure: https://github.com/elastic/elasticsearch/issues/93852 # (should be removed when test issue is resolved) - do: mget: - index: test + index: id_generation_test body: ids: [ cn4excfoxSs_KdA5AAABeRnR_mY, cn4excfoxSs_KdA5AAABeRnR11Y ] - - match: { docs.0._index: "test" } + - match: { docs.0._index: "id_generation_test" } - match: { docs.0._id: "cn4excfoxSs_KdA5AAABeRnR_mY" } - match: { docs.0.found: true } - - match: { docs.1._index: "test" } + - match: { docs.1._index: "id_generation_test" } - match: { docs.1._id: "cn4excfoxSs_KdA5AAABeRnR11Y" } - match: { docs.1.found: true } - do: bulk: - index: test + index: id_generation_test body: - '{"delete": {"_id": "cn4excfoxSs_KdA5AAABeRnR_mY"}}' - '{"delete": {"_id": "cn4excfoxSs_KdA5AAABeRnR11Y"}}' @@ -409,17 +377,13 @@ delete over _bulk: - match: {items.0.delete.result: deleted} - match: {items.1.delete.result: deleted} - match: {items.2.delete.status: 404} - - match: {items.2.delete.error.reason: "invalid id [not found ++ not found] for index [test] in time series mode"} + - match: {items.2.delete.error.reason: "invalid id [not found ++ not found] for index [id_generation_test] in time series mode"} --- routing_path matches deep object: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: indices.create: - index: test2 + index: 
routing_path_test body: settings: index: @@ -449,7 +413,7 @@ routing_path matches deep object: - do: bulk: refresh: true - index: test2 + index: routing_path_test body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"bar": {"baz": {"uid": "uid1"}}}}}' @@ -458,13 +422,9 @@ routing_path matches deep object: --- routing_path matches object: - - skip: - version: " - 8.12.99" - reason: _tsid hashing introduced in 8.13 - - do: indices.create: - index: test2 + index: routing_path_test_2 body: settings: index: @@ -490,7 +450,7 @@ routing_path matches object: - do: bulk: refresh: true - index: test2 + index: routing_path_test_2 body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"uid": "uid1"}}}' From 2988799079cd75593d53bbf3d1862cf6746481ff Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 20 Mar 2024 20:27:08 +0200 Subject: [PATCH 074/214] [DSL Global Retention] Use data stream global retention metadata (#106221) --- .../lifecycle/apis/get-lifecycle.asciidoc | 8 +- ...orial-manage-existing-data-stream.asciidoc | 4 +- .../tutorial-manage-new-data-stream.asciidoc | 17 ++- ...grate-data-stream-from-ilm-to-dsl.asciidoc | 16 ++- .../action/GetDataStreamsTransportAction.java | 4 +- .../ExplainDataStreamLifecycleAction.java | 38 +++++-- .../action/GetDataStreamLifecycleAction.java | 66 ++++++++---- ...sportExplainDataStreamLifecycleAction.java | 4 +- ...TransportGetDataStreamLifecycleAction.java | 4 +- .../GetDataStreamsTransportActionTests.java | 44 ++++++++ ...plainDataStreamLifecycleResponseTests.java | 46 ++++++-- .../org/elasticsearch/TransportVersions.java | 1 + .../get/GetComponentTemplateAction.java | 32 +++++- .../get/GetComposableIndexTemplateAction.java | 30 +++++- .../TransportGetComponentTemplateAction.java | 4 +- ...sportGetComposableIndexTemplateAction.java | 4 +- .../post/SimulateIndexTemplateResponse.java | 43 +++++--- .../TransportSimulateIndexTemplateAction.java | 9 +- 
.../post/TransportSimulateTemplateAction.java | 7 +- .../datastreams/GetDataStreamAction.java | 50 +++++++-- .../ExplainIndexDataStreamLifecycle.java | 17 ++- .../cluster/metadata/ComponentTemplate.java | 12 ++- .../metadata/ComposableIndexTemplate.java | 12 ++- .../cluster/metadata/DataStreamLifecycle.java | 45 ++++---- .../cluster/metadata/Template.java | 12 ++- .../GetComponentTemplateResponseTests.java | 101 ++++++++++++++++-- .../metadata/ComponentTemplateTests.java | 18 ++-- .../ComposableIndexTemplateTests.java | 19 ++-- ...reamGlobalRetentionSerializationTests.java | 5 +- .../metadata/DataStreamLifecycleTests.java | 13 ++- .../cluster/metadata/DataStreamTests.java | 11 +- 31 files changed, 541 insertions(+), 155 deletions(-) diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index f20a3393c191c..1bda7d8959bee 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -130,14 +130,18 @@ The response will look like the following: "name": "my-data-stream-1", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } }, { "name": "my-data-stream-2", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } } ] diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc index 5670faaade3ce..7be2b30b9b83c 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc @@ -74,7 +74,9 @@ The response will look 
like: "generation_time": "6.84s", <9> "lifecycle": { "enabled": true, - "data_retention": "30d" <10> + "data_retention": "30d", + "effective_retention": "30d", <10> + "retention_determined_by": "data_stream_configuration" } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc index 6f1d81ab6ead2..ecfdc16884082 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc @@ -93,10 +93,12 @@ The result will look like this: { "data_streams": [ { - "name": "my-data-stream",<1> + "name": "my-data-stream", <1> "lifecycle": { - "enabled": true, <2> - "data_retention": "7d" <3> + "enabled": true, <2> + "data_retention": "7d", <3> + "effective_retention": "7d", <4> + "retention_determined_by": "data_stream_configuration" <5> } } ] @@ -104,8 +106,11 @@ The result will look like this: -------------------------------------------------- <1> The name of your data stream. <2> Shows if the data stream lifecycle is enabled for this data stream. -<3> The retention period of the data indexed in this data stream, this means that the data in this data stream will +<3> The desired retention period of the data indexed in this data stream, this means that if there are no other limitations +the data for this data stream will be preserved for at least 7 days. +<4> The effective retention, this means that the data in this data stream will be kept at least for 7 days. After that {es} can delete it at its own discretion. +<5> The configuration that determined the effective retention.
If you want to see more information about how the data stream lifecycle is applied on individual backing indices use the <>: @@ -128,7 +133,9 @@ The result will look like this: "time_since_index_creation": "1.6m", <3> "lifecycle": { <4> "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" } } } diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 3125c82120d8d..65eaf472890f4 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -200,10 +200,10 @@ PUT _index_template/dsl-data-stream-template "template": { "settings": { "index.lifecycle.name": "pre-dsl-ilm-policy", - "index.lifecycle.prefer_ilm": false <1> + "index.lifecycle.prefer_ilm": false <1> }, - "lifecycle": { - "data_retention": "7d" <2> + "lifecycle": { <2> + "data_retention": "7d" <3> } } } @@ -215,6 +215,8 @@ PUT _index_template/dsl-data-stream-template precedence over data stream lifecycle. <2> We're configuring the data stream lifecycle so _new_ data streams will be managed by data stream lifecycle. +<3> The desired retention, meaning that this data stream should keep the data for at least 7 days, +if this retention is possible. We've now made sure that new data streams will be managed by data stream lifecycle. 
@@ -268,7 +270,9 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", <3> @@ -346,7 +350,9 @@ GET _data_stream/dsl-data-stream "template": "dsl-data-stream-template", "lifecycle": { "enabled": true, - "data_retention": "7d" + "data_retention": "7d", + "effective_retention": "7d", + "retention_determined_by": "data_stream_configuration" }, "ilm_policy": "pre-dsl-ilm-policy", "next_generation_managed_by": "Data stream lifecycle", diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index e44ee5107711f..2b1d4ae01f565 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -199,7 +200,8 @@ static GetDataStreamAction.Response innerOperation( } return new GetDataStreamAction.Response( dataStreamInfos, - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? 
clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java index 676052f76d564..5bfdf2d382005 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -16,6 +17,8 @@ import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -136,23 +139,33 @@ public Request indicesOptions(IndicesOptions indicesOptions) { } /** - * Class representing the response for the explain of the data stream lifecycle action for one or more indices. + * Class representing the response for the 'explain' of the data stream lifecycle action for one or more indices. 
*/ public static class Response extends ActionResponse implements ChunkedToXContentObject { public static final ParseField INDICES_FIELD = new ParseField("indices"); - private List indices; + private final List indices; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; - public Response(List indices, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List indices, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.indices = indices; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { super(in); this.indices = in.readCollectionAsList(ExplainIndexDataStreamLifecycle::new); this.rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); + this.globalRetention = in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null; } public List getIndices() { @@ -163,10 +176,17 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(indices); out.writeOptionalWriteable(rolloverConfiguration); + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -178,12 +198,14 @@ public boolean equals(Object o) { return false; } Response response = (Response) o; - return Objects.equals(indices, response.indices) && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + return Objects.equals(indices, response.indices) + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, response.globalRetention); } @Override public int hashCode() { - return Objects.hash(indices, rolloverConfiguration); + return Objects.hash(indices, rolloverConfiguration, globalRetention); } @Override @@ -194,7 +216,11 @@ public Iterator toXContentChunked(ToXContent.Params outerP return builder; }), Iterators.map(indices.iterator(), explainIndexDataLifecycle -> (builder, params) -> { builder.field(explainIndexDataLifecycle.getIndex()); - explainIndexDataLifecycle.toXContent(builder, params, rolloverConfiguration); + ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + explainIndexDataLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); return builder; }), Iterators.single((builder, params) -> { builder.endObject(); diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java index 8149e1a0df443..79e1b71771559 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleAction.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.datastreams.lifecycle.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -14,6 +15,7 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -157,19 +159,24 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** - * Converts the response to XContent and passes the RolloverConditions, when provided, to the data stream lifecycle. + * Converts the response to XContent and passes the RolloverConditions and the global retention, when provided, + * to the data stream lifecycle. 
*/ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(NAME_FIELD.getPreferredName(), dataStreamName); if (lifecycle != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + lifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); } builder.endObject(); return builder; @@ -179,18 +186,31 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla private final List dataStreamLifecycles; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(List dataStreamLifecycles) { - this(dataStreamLifecycles, null); + this(dataStreamLifecycles, null, null); } - public Response(List dataStreamLifecycles, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List dataStreamLifecycles, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.dataStreamLifecycles = dataStreamLifecycles; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { - this(in.readCollectionAsList(Response.DataStreamLifecycle::new), in.readOptionalWriteable(RolloverConfiguration::new)); + this( + in.readCollectionAsList(Response.DataStreamLifecycle::new), + in.readOptionalWriteable(RolloverConfiguration::new), + in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? 
in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null + ); } public List getDataStreamLifecycles() { @@ -206,6 +226,9 @@ public RolloverConfiguration getRolloverConfiguration() { public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreamLifecycles); out.writeOptionalWriteable(rolloverConfiguration); + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -214,17 +237,17 @@ public Iterator toXContentChunked(ToXContent.Params outerParams) { builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); return builder; - }), - Iterators.map( - dataStreamLifecycles.iterator(), - dataStreamLifecycle -> (builder, params) -> dataStreamLifecycle.toXContent(builder, params, rolloverConfiguration) - ), - Iterators.single((builder, params) -> { - builder.endArray(); - builder.endObject(); - return builder; - }) - ); + }), Iterators.map(dataStreamLifecycles.iterator(), dataStreamLifecycle -> (builder, params) -> { + ToXContent.Params withEffectiveRetentionParams = new ToXContent.DelegatingMapParams( + org.elasticsearch.cluster.metadata.DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + return dataStreamLifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); + }), Iterators.single((builder, params) -> { + builder.endArray(); + builder.endObject(); + return builder; + })); } @Override @@ -233,12 +256,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Response response = (Response) o; return dataStreamLifecycles.equals(response.dataStreamLifecycles) - && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, response.globalRetention); } @Override public int hashCode() { - 
return Objects.hash(dataStreamLifecycles, rolloverConfiguration); + return Objects.hash(dataStreamLifecycles, rolloverConfiguration, globalRetention); } } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java index a42e8dfefc468..a5c3b092a8913 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportExplainDataStreamLifecycleAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -111,7 +112,8 @@ protected void masterOperation( listener.onResponse( new ExplainDataStreamLifecycleAction.Response( explainIndices, - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? 
clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ) ); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java index 29b88fc5748bf..84144cdcb0379 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -89,7 +90,8 @@ protected void masterOperation( ) .sorted(Comparator.comparing(GetDataStreamLifecycleAction.Response.DataStreamLifecycle::dataStreamName)) .toList(), - request.includeDefaults() ? clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null + request.includeDefaults() ? 
clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) : null, + DataStreamGlobalRetention.getFromClusterState(state) ) ); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java index 637fb44affb6f..2a356e3ebb166 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportActionTests.java @@ -11,11 +11,13 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -35,6 +37,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class GetDataStreamsTransportActionTests extends ESTestCase { @@ -248,4 +251,45 @@ public void testGetTimeSeriesMixedDataStream() { ) ); } + + public void testPassingGlobalRetention() { + ClusterState state; + { + var mBuilder = new Metadata.Builder(); + DataStreamTestHelper.getClusterStateWithDataStreams( + mBuilder, + List.of(Tuple.tuple("data-stream-1", 2)), + List.of(), + System.currentTimeMillis(), + 
Settings.EMPTY, + 0, + false, + false + ); + state = ClusterState.builder(new ClusterName("_name")).metadata(mBuilder).build(); + } + + var req = new GetDataStreamAction.Request(new String[] {}); + var response = GetDataStreamsTransportAction.innerOperation( + state, + req, + resolver, + systemIndices, + ClusterSettings.createBuiltInClusterSettings() + ); + assertThat(response.getGlobalRetention(), nullValue()); + DataStreamGlobalRetention globalRetention = new DataStreamGlobalRetention( + TimeValue.timeValueDays(randomIntBetween(1, 5)), + TimeValue.timeValueDays(randomIntBetween(5, 10)) + ); + state = ClusterState.builder(state).putCustom(DataStreamGlobalRetention.TYPE, globalRetention).build(); + response = GetDataStreamsTransportAction.innerOperation( + state, + req, + resolver, + systemIndices, + ClusterSettings.createBuiltInClusterSettings() + ); + assertThat(response.getGlobalRetention(), equalTo(globalRetention)); + } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java index 829fe454f7463..462c0626c6296 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java @@ -14,7 +14,9 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -35,6 +37,7 @@ import static org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction.Response; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -66,7 +69,7 @@ public void testToXContent() throws IOException { ExplainIndexDataStreamLifecycle explainIndex = createRandomIndexDataStreamLifecycleExplanation(now, lifecycle); explainIndex.setNowSupplier(() -> now); { - Response response = new Response(List.of(explainIndex), null); + Response response = new Response(List.of(explainIndex), null, null); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -103,7 +106,7 @@ public void testToXContent() throws IOException { } else { assertThat(explainIndexMap.get("generation_time"), is(nullValue())); } - assertThat(explainIndexMap.get("lifecycle"), is(Map.of("enabled", true))); // empty lifecycle + assertThat(explainIndexMap.get("lifecycle"), is(Map.of("enabled", true))); if (explainIndex.getError() != null) { Map errorObject = (Map) explainIndexMap.get("error"); assertThat(errorObject.get(ErrorEntry.MESSAGE_FIELD.getPreferredName()), is(explainIndex.getError().error())); @@ -132,7 +135,11 @@ public void testToXContent() throws IOException { new MinPrimaryShardDocsCondition(4L) ) ); - Response response = new Response(List.of(explainIndex), new RolloverConfiguration(rolloverConditions)); + Response response = new Response( + List.of(explainIndex), + new RolloverConfiguration(rolloverConditions), + DataStreamTestHelper.randomGlobalRetention() + ); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); 
response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -186,9 +193,27 @@ public void testToXContent() throws IOException { assertThat(explainIndexMap.get("error"), is(nullValue())); } - Map lifecycleRollover = (Map) ((Map) explainIndexMap.get("lifecycle")).get( - "rollover" - ); + Map lifecycleMap = (Map) explainIndexMap.get("lifecycle"); + assertThat(lifecycleMap.get("data_retention"), nullValue()); + + if (response.getGlobalRetention() == null) { + assertThat(lifecycleMap.get("effective_retention"), nullValue()); + assertThat(lifecycleMap.get("retention_determined_by"), nullValue()); + } else if (response.getGlobalRetention().getDefaultRetention() != null) { + assertThat( + lifecycleMap.get("effective_retention"), + equalTo(response.getGlobalRetention().getDefaultRetention().getStringRep()) + ); + assertThat(lifecycleMap.get("retention_determined_by"), equalTo("default_global_retention")); + } else { + assertThat( + lifecycleMap.get("effective_retention"), + equalTo(response.getGlobalRetention().getMaxRetention().getStringRep()) + ); + assertThat(lifecycleMap.get("retention_determined_by"), equalTo("max_global_retention")); + } + + Map lifecycleRollover = (Map) lifecycleMap.get("rollover"); assertThat(lifecycleRollover.get("min_primary_shard_docs"), is(4)); assertThat(lifecycleRollover.get("max_primary_shard_docs"), is(9)); } @@ -212,7 +237,7 @@ public void testToXContent() throws IOException { ) : null ); - Response response = new Response(List.of(explainIndexWithNullGenerationDate), null); + Response response = new Response(List.of(explainIndexWithNullGenerationDate), null, null); XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); response.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xcontent -> { @@ -241,6 +266,7 @@ public void testChunkCount() { createRandomIndexDataStreamLifecycleExplanation(now, lifecycle), createRandomIndexDataStreamLifecycleExplanation(now, lifecycle) ), + null, null ); @@ -296,6 +322,12 @@ 
private Response randomResponse() { Map.of(MaxPrimaryShardDocsCondition.NAME, new MaxPrimaryShardDocsCondition(randomLongBetween(1000, 199_999_000))) ) ) + : null, + randomBoolean() + ? new DataStreamGlobalRetention( + TimeValue.timeValueDays(randomIntBetween(1, 10)), + TimeValue.timeValueDays(randomIntBetween(10, 20)) + ) : null ); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9184e46b11542..b5070c5cbd065 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -150,6 +150,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_SERIALIZE_BIG_ARRAY = def(8_610_00_0); public static final TransportVersion AUTO_SHARDING_ROLLOVER_CONDITION = def(8_611_00_0); public static final TransportVersion KNN_QUERY_VECTOR_BUILDER = def(8_612_00_0); + public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 7d2dad80bf35a..626feeed161f4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -117,6 +119,8 @@ public static class Response extends ActionResponse implements ToXContentObject private final Map componentTemplates; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(StreamInput in) throws IOException { super(in); @@ -126,16 +130,27 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + globalRetention = in.readOptionalWriteable(DataStreamGlobalRetention::read); + } else { + globalRetention = null; + } } public Response(Map componentTemplates) { this.componentTemplates = componentTemplates; this.rolloverConfiguration = null; + this.globalRetention = null; } - public Response(Map componentTemplates, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + Map componentTemplates, + 
@Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.componentTemplates = componentTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Map getComponentTemplates() { @@ -146,12 +161,19 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(componentTemplates, StreamOutput::writeWriteable); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -160,23 +182,25 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Response that = (Response) o; return Objects.equals(componentTemplates, that.componentTemplates) - && Objects.equals(rolloverConfiguration, that.rolloverConfiguration); + && Objects.equals(rolloverConfiguration, that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(componentTemplates, rolloverConfiguration); + return Objects.hash(componentTemplates, rolloverConfiguration, globalRetention); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(COMPONENT_TEMPLATES.getPreferredName()); for (Map.Entry componentTemplate : this.componentTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), componentTemplate.getKey()); 
builder.field(COMPONENT_TEMPLATE.getPreferredName()); - componentTemplate.getValue().toXContent(builder, params, rolloverConfiguration); + componentTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index f75443bad2854..515b8f9fd5c1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -118,7 +120,10 @@ public static class Response extends ActionResponse implements ToXContentObject public static final ParseField INDEX_TEMPLATE = new ParseField("index_template"); private final Map indexTemplates; + @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(StreamInput in) throws IOException { super(in); @@ -128,16 +133,27 @@ public Response(StreamInput in) throws IOException { } else { rolloverConfiguration = null; } + if (in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + globalRetention = 
in.readOptionalWriteable(DataStreamGlobalRetention::read); + } else { + globalRetention = null; + } } public Response(Map indexTemplates) { this.indexTemplates = indexTemplates; this.rolloverConfiguration = null; + this.globalRetention = null; } - public Response(Map indexTemplates, RolloverConfiguration rolloverConfiguration) { + public Response( + Map indexTemplates, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.indexTemplates = indexTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Map indexTemplates() { @@ -150,6 +166,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override @@ -157,23 +176,26 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetComposableIndexTemplateAction.Response that = (GetComposableIndexTemplateAction.Response) o; - return Objects.equals(indexTemplates, that.indexTemplates) && Objects.equals(rolloverConfiguration, that.rolloverConfiguration); + return Objects.equals(indexTemplates, that.indexTemplates) + && Objects.equals(rolloverConfiguration, that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(indexTemplates, rolloverConfiguration); + return Objects.hash(indexTemplates, rolloverConfiguration, globalRetention); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); 
builder.startObject(); builder.startArray(INDEX_TEMPLATES.getPreferredName()); for (Map.Entry indexTemplate : this.indexTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), indexTemplate.getKey()); builder.field(INDEX_TEMPLATE.getPreferredName()); - indexTemplate.getValue().toXContent(builder, params, rolloverConfiguration); + indexTemplate.getValue().toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java index e76dc0f46eea2..d081570b2a365 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComponentTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -96,7 +97,8 @@ protected void masterOperation( listener.onResponse( new GetComponentTemplateAction.Response( results, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + DataStreamGlobalRetention.getFromClusterState(state) ) ); } else { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java index c9b2a23c38828..99360d2eb7bf8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/TransportGetComposableIndexTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -94,7 +95,8 @@ protected void masterOperation( listener.onResponse( new GetComposableIndexTemplateAction.Response( results, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + DataStreamGlobalRetention.getFromClusterState(state) ) ); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index 106f1a7e4f393..378df2d7d53e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -11,6 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,27 +39,35 @@ public class SimulateIndexTemplateResponse extends ActionResponse implements ToX @Nullable // the resolved settings, mappings and aliases for the matched templates, if any - private Template resolvedTemplate; + private final Template resolvedTemplate; @Nullable // a map of template names and their index patterns that would overlap when matching the given index name - private Map> overlappingTemplates; + private final Map> overlappingTemplates; @Nullable - private RolloverConfiguration rolloverConfiguration = null; + private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; - public SimulateIndexTemplateResponse(@Nullable Template resolvedTemplate, @Nullable Map> overlappingTemplates) { - this(resolvedTemplate, overlappingTemplates, null); + public SimulateIndexTemplateResponse( + @Nullable Template resolvedTemplate, + @Nullable Map> overlappingTemplates, + DataStreamGlobalRetention globalRetention + ) { + this(resolvedTemplate, overlappingTemplates, null, globalRetention); } public SimulateIndexTemplateResponse( @Nullable Template resolvedTemplate, @Nullable Map> overlappingTemplates, - @Nullable RolloverConfiguration rolloverConfiguration + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention ) { this.resolvedTemplate = resolvedTemplate; this.overlappingTemplates = overlappingTemplates; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public SimulateIndexTemplateResponse(StreamInput in) throws IOException { @@ 
-73,9 +83,12 @@ public SimulateIndexTemplateResponse(StreamInput in) throws IOException { } else { this.overlappingTemplates = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { - rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); - } + rolloverConfiguration = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) + ? in.readOptionalWriteable(RolloverConfiguration::new) + : null; + globalRetention = in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null; } @Override @@ -94,14 +107,18 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); if (this.resolvedTemplate != null) { builder.field(TEMPLATE.getPreferredName()); - this.resolvedTemplate.toXContent(builder, params, rolloverConfiguration); + this.resolvedTemplate.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } if (this.overlappingTemplates != null) { builder.startArray(OVERLAPPING.getPreferredName()); @@ -127,12 +144,14 @@ public boolean equals(Object o) { } SimulateIndexTemplateResponse that = (SimulateIndexTemplateResponse) o; return Objects.equals(resolvedTemplate, that.resolvedTemplate) - && Objects.deepEquals(overlappingTemplates, that.overlappingTemplates); + && Objects.deepEquals(overlappingTemplates, that.overlappingTemplates) + && Objects.equals(rolloverConfiguration, 
that.rolloverConfiguration) + && Objects.equals(globalRetention, that.globalRetention); } @Override public int hashCode() { - return Objects.hash(resolvedTemplate, overlappingTemplates); + return Objects.hash(resolvedTemplate, overlappingTemplates, rolloverConfiguration, globalRetention); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index bee6ab7f78be0..51e17999da5c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -112,6 +113,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + final DataStreamGlobalRetention globalRetention = DataStreamGlobalRetention.getFromClusterState(state); final ClusterState stateWithTemplate; if (request.getIndexTemplateRequest() != null) { // we'll "locally" add the template defined by the user in the cluster state (as if it existed in the system) @@ -137,7 +139,7 @@ protected void masterOperation( String matchingTemplate = findV2Template(stateWithTemplate.metadata(), request.getIndexName(), false); if (matchingTemplate == null) { - listener.onResponse(new SimulateIndexTemplateResponse(null, null)); + listener.onResponse(new 
SimulateIndexTemplateResponse(null, null, null)); return; } @@ -165,11 +167,12 @@ protected void masterOperation( new SimulateIndexTemplateResponse( template, overlapping, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + globalRetention ) ); } else { - listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping)); + listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping, globalRetention)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java index 1f35d0b8a1268..39cf5f43f39ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateTemplateAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; @@ -99,6 +100,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + final DataStreamGlobalRetention globalRetention = DataStreamGlobalRetention.getFromClusterState(state); String uuid = UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); final String temporaryIndexName = "simulate_template_index_" + uuid; final ClusterState stateWithTemplate; @@ -176,11 +178,12 @@ protected void 
masterOperation( new SimulateIndexTemplateResponse( template, overlapping, - clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING) + clusterSettings.get(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING), + globalRetention ) ); } else { - listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping)); + listener.onResponse(new SimulateIndexTemplateResponse(template, overlapping, globalRetention)); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 8c469f7dffc4d..36f2ff4fffa96 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -19,6 +19,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAutoShardingEvent; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -276,14 +278,19 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** - * Converts the response to XContent and passes the RolloverConditions, when provided, to the data stream. + * Converts the response to XContent and passes the RolloverConditions and the global retention, when provided, + * to the data stream. 
*/ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(DataStream.NAME_FIELD.getPreferredName(), dataStream.getName()); builder.field(DataStream.TIMESTAMP_FIELD_FIELD.getPreferredName()) @@ -339,7 +346,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla } if (dataStream.getLifecycle() != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - dataStream.getLifecycle().toXContent(builder, params, rolloverConfiguration); + dataStream.getLifecycle().toXContent(builder, params, rolloverConfiguration, globalRetention); } if (ilmPolicyName != null) { builder.field(ILM_POLICY_FIELD.getPreferredName(), ilmPolicyName); @@ -483,20 +490,30 @@ public void writeTo(StreamOutput out) throws IOException { private final List dataStreams; @Nullable private final RolloverConfiguration rolloverConfiguration; + @Nullable + private final DataStreamGlobalRetention globalRetention; public Response(List dataStreams) { - this(dataStreams, null); + this(dataStreams, null, null); } - public Response(List dataStreams, @Nullable RolloverConfiguration rolloverConfiguration) { + public Response( + List dataStreams, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) { this.dataStreams = dataStreams; this.rolloverConfiguration = rolloverConfiguration; + this.globalRetention = globalRetention; } public Response(StreamInput in) throws IOException { this( in.readCollectionAsList(DataStreamInfo::new), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? 
in.readOptionalWriteable(RolloverConfiguration::new) : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null, + in.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION) + ? in.readOptionalWriteable(DataStreamGlobalRetention::read) + : null ); } @@ -509,20 +526,29 @@ public RolloverConfiguration getRolloverConfiguration() { return rolloverConfiguration; } + @Nullable + public DataStreamGlobalRetention getGlobalRetention() { + return globalRetention; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreams); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } + if (out.getTransportVersion().onOrAfter(TransportVersions.USE_DATA_STREAM_GLOBAL_RETENTION)) { + out.writeOptionalWriteable(globalRetention); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + Params withEffectiveRetentionParams = new DelegatingMapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, params); builder.startObject(); builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); for (DataStreamInfo dataStream : dataStreams) { - dataStream.toXContent(builder, params, rolloverConfiguration); + dataStream.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } builder.endArray(); builder.endObject(); @@ -534,12 +560,14 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Response response = (Response) o; - return dataStreams.equals(response.dataStreams) && Objects.equals(rolloverConfiguration, response.rolloverConfiguration); + return dataStreams.equals(response.dataStreams) + && Objects.equals(rolloverConfiguration, response.rolloverConfiguration) + && Objects.equals(globalRetention, 
response.globalRetention); } @Override public int hashCode() { - return Objects.hash(dataStreams, rolloverConfiguration); + return Objects.hash(dataStreams, rolloverConfiguration, globalRetention); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java index 640d8a9efe8ac..2b79377fb71e0 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ExplainIndexDataStreamLifecycle.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.datastreams.lifecycle; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -98,11 +99,15 @@ public ExplainIndexDataStreamLifecycle(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(INDEX_FIELD.getPreferredName(), index); builder.field(MANAGED_BY_LIFECYCLE_FIELD.getPreferredName(), managedByLifecycle); @@ -127,7 +132,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params 
params, @Nulla } if (this.lifecycle != null) { builder.field(LIFECYCLE_FIELD.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + Params withEffectiveRetentionParams = new DelegatingMapParams( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS, + params + ); + lifecycle.toXContent(builder, withEffectiveRetentionParams, rolloverConfiguration, globalRetention); } if (this.error != null) { if (error.firstOccurrenceTimestamp() != -1L && error.recordedTimestamp() != -1L && error.retryCount() != -1) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java index d3d758e110ff3..a11ec64dc6f2c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java @@ -163,17 +163,21 @@ public String toString() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the component template to XContent and passes the RolloverConditions, when provided, to the template. 
*/ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.field(TEMPLATE.getPreferredName()); - this.template.toXContent(builder, params, rolloverConfiguration); + this.template.toXContent(builder, params, rolloverConfiguration, globalRetention); if (this.version != null) { builder.field(VERSION.getPreferredName(), this.version); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 7702ec0ac0b5c..8e8e6fff4cc6a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -259,19 +259,23 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the composable index template to XContent and passes the RolloverConditions, when provided, to the template. 
*/ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); builder.stringListField(INDEX_PATTERNS.getPreferredName(), this.indexPatterns); if (this.template != null) { builder.field(TEMPLATE.getPreferredName()); - this.template.toXContent(builder, params, rolloverConfiguration); + this.template.toXContent(builder, params, rolloverConfiguration, globalRetention); } if (this.componentTemplates != null) { builder.stringListField(COMPOSED_OF.getPreferredName(), this.componentTemplates); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java index a8b094bafde2e..6db7b2cf670bc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; @@ -53,6 +54,14 @@ public class DataStreamLifecycle implements SimpleDiffable, public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_10_X; public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; + // The following XContent params are used to enrich the DataStreamLifecycle json with effective retention information + // This should be set only when the lifecycle is used in a response to the user and NEVER when we expect the json to + // be deserialized. 
+ public static final String INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME = "include_effective_retention"; + public static final Map INCLUDE_EFFECTIVE_RETENTION_PARAMS = Map.of( + DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME, + "true" + ); /** * Check if {@link #DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME} is present and set to {@code true}, indicating that @@ -79,6 +88,8 @@ public static boolean isDataStreamsLifecycleOnlyMode(final Settings settings) { public static final ParseField ENABLED_FIELD = new ParseField("enabled"); public static final ParseField DATA_RETENTION_FIELD = new ParseField("data_retention"); + public static final ParseField EFFECTIVE_RETENTION_FIELD = new ParseField("effective_retention"); + public static final ParseField RETENTION_SOURCE_FIELD = new ParseField("retention_determined_by"); public static final ParseField DOWNSAMPLING_FIELD = new ParseField("downsampling"); private static final ParseField ROLLOVER_FIELD = new ParseField("rollover"); @@ -130,17 +141,6 @@ public boolean isEnabled() { return enabled; } - /** - * The least amount of time data should be kept by elasticsearch. - * @return the time period or null, null represents that data should never be deleted. - * @deprecated use {@link #getEffectiveDataRetention(DataStreamGlobalRetention)} - */ - @Deprecated - @Nullable - public TimeValue getEffectiveDataRetention() { - return getEffectiveDataRetention(null); - } - /** * The least amount of time data should be kept by elasticsearch. * @return the time period or null, null represents that data should never be deleted. @@ -275,17 +275,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } /** - * Converts the data stream lifecycle to XContent and injects the RolloverConditions if they exist. 
- * @deprecated use {@link #toXContent(XContentBuilder, Params, RolloverConfiguration, DataStreamGlobalRetention)} - */ - @Deprecated - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { - return toXContent(builder, params, rolloverConfiguration, null); - } - - /** - * Converts the data stream lifecycle to XContent and injects the RolloverConditions and the global retention if they exist. + * Converts the data stream lifecycle to XContent, enriches it with effective retention information when requested + * and injects the RolloverConditions if they exist. + * In order to request the effective retention you need to set {@link #INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME} to true + * in the XContent params. */ public XContentBuilder toXContent( XContentBuilder builder, @@ -302,6 +295,14 @@ public XContentBuilder toXContent( builder.field(DATA_RETENTION_FIELD.getPreferredName(), dataRetention.value().getStringRep()); } } + if (params.paramAsBoolean(INCLUDE_EFFECTIVE_RETENTION_PARAM_NAME, false)) { + Tuple effectiveRetention = getEffectiveDataRetentionWithSource(globalRetention); + if (effectiveRetention.v1() != null) { + builder.field(EFFECTIVE_RETENTION_FIELD.getPreferredName(), effectiveRetention.v1().getStringRep()); + builder.field(RETENTION_SOURCE_FIELD.getPreferredName(), effectiveRetention.v2().displayName()); + } + } + if (downsampling != null) { builder.field(DOWNSAMPLING_FIELD.getPreferredName()); downsampling.toXContent(builder, params); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java index 18a99f984707f..74627e27032b4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java @@ -213,14 +213,18 @@ public String toString() { @Override public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { - return toXContent(builder, params, null); + return toXContent(builder, params, null, null); } /** * Converts the template to XContent and passes the RolloverConditions, when provided, to the lifecycle. */ - public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nullable RolloverConfiguration rolloverConfiguration) - throws IOException { + public XContentBuilder toXContent( + XContentBuilder builder, + Params params, + @Nullable RolloverConfiguration rolloverConfiguration, + @Nullable DataStreamGlobalRetention globalRetention + ) throws IOException { builder.startObject(); if (this.settings != null) { builder.startObject(SETTINGS.getPreferredName()); @@ -250,7 +254,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params, @Nulla } if (this.lifecycle != null) { builder.field(LIFECYCLE.getPreferredName()); - lifecycle.toXContent(builder, params, rolloverConfiguration); + lifecycle.toXContent(builder, params, rolloverConfiguration, globalRetention); } builder.endObject(); return builder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java index 3b24f90b9d854..2af4bf5016ad2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java @@ -8,15 +8,34 @@ package org.elasticsearch.action.admin.indices.template.get; +import org.elasticsearch.action.admin.indices.rollover.RolloverConfigurationTests; +import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComponentTemplateTests; +import 
org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSerializationTests; +import org.elasticsearch.cluster.metadata.DataStreamLifecycle; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; -import java.util.Collections; +import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomAliases; +import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomMappings; +import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomSettings; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.not; + public class GetComponentTemplateResponseTests extends AbstractWireSerializingTestCase { @Override protected Writeable.Reader instanceReader() { @@ -25,18 +44,84 @@ protected Writeable.Reader instanceReader() @Override protected GetComponentTemplateAction.Response createTestInstance() { + return new GetComponentTemplateAction.Response( + randomBoolean() ? 
Map.of() : randomTemplates(), + RolloverConfigurationTests.randomRolloverConditions(), + DataStreamGlobalRetentionSerializationTests.randomGlobalRetention() + ); + } + + @Override + protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) { + var templates = instance.getComponentTemplates(); + var rolloverConditions = instance.getRolloverConfiguration(); + var globalRetention = instance.getGlobalRetention(); + switch (randomInt(2)) { + case 0 -> templates = templates == null ? randomTemplates() : null; + case 1 -> rolloverConditions = randomValueOtherThan(rolloverConditions, RolloverConfigurationTests::randomRolloverConditions); + case 2 -> globalRetention = randomValueOtherThan( + globalRetention, + DataStreamGlobalRetentionSerializationTests::randomGlobalRetention + ); + } + return new GetComponentTemplateAction.Response(templates, rolloverConditions, globalRetention); + } + + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { + Settings settings = null; + CompressedXContent mappings = null; + Map aliases = null; + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); if (randomBoolean()) { - return new GetComponentTemplateAction.Response(Collections.emptyMap()); + settings = randomSettings(); } - Map templates = new HashMap<>(); - for (int i = 0; i < randomIntBetween(1, 4); i++) { - templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance()); + if (randomBoolean()) { + mappings = randomMappings(); + } + if (randomBoolean()) { + aliases = randomAliases(); + } + + var template = new ComponentTemplate( + new Template(settings, mappings, aliases, lifecycle), + randomBoolean() ? 
null : randomNonNegativeLong(), + null, + false + ); + var globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + var rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); + var response = new GetComponentTemplateAction.Response( + Map.of(randomAlphaOfLength(10), template), + rolloverConfiguration, + globalRetention + ); + + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + String serialized = Strings.toString(builder); + assertThat(serialized, containsString("rollover")); + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) + .getConditions() + .keySet()) { + assertThat(serialized, containsString(label)); + } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } - return new GetComponentTemplateAction.Response(templates); } @Override - protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) { - return randomValueOtherThan(instance, this::createTestInstance); + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(IndicesModule.getNamedWriteables()); + } + + private static Map randomTemplates() { + Map templates = new HashMap<>(); + for (int i = 0; i < randomIntBetween(1, 4); i++) { + templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance()); + } + return templates; } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java index 8b9ef91923839..7efa624b49148 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java @@ -32,6 +32,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class ComponentTemplateTests extends SimpleDiffableSerializationTestCase { @Override @@ -112,7 +113,7 @@ public static Map randomAliases() { return Collections.singletonMap(aliasName, aliasMeta); } - private static CompressedXContent randomMappings() { + public static CompressedXContent randomMappings() { try { return new CompressedXContent("{\"properties\":{\"" + randomAlphaOfLength(5) + "\":{\"type\":\"keyword\"}}}"); } catch (IOException e) { @@ -121,7 +122,7 @@ private static CompressedXContent randomMappings() { } } - private static Settings randomSettings() { + public static Settings randomSettings() { return indexSettings(randomIntBetween(1, 10), randomIntBetween(0, 5)).put(IndexMetadata.SETTING_BLOCKS_READ, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) .put(IndexMetadata.SETTING_BLOCKS_WRITE, randomBoolean()) @@ -265,7 +266,7 @@ public void testMappingsEquals() throws IOException { } } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { Settings settings = null; CompressedXContent mappings = null; Map aliases = null; @@ -278,7 +279,7 @@ public void testXContentSerializationWithRollover() throws IOException { if (randomBoolean()) { aliases = randomAliases(); } - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); ComponentTemplate template = new ComponentTemplate( new Template(settings, mappings, aliases, lifecycle), randomNonNegativeLong(), @@ -288,14 
+289,19 @@ public void testXContentSerializationWithRollover() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); - for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention()) + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) .getConditions() .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index fe678ec23afad..6485634f879ba 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -30,6 +30,7 @@ import static org.elasticsearch.cluster.metadata.DataStream.TIMESTAMP_FIELD_NAME; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; 
+import static org.hamcrest.Matchers.not; public class ComposableIndexTemplateTests extends SimpleDiffableSerializationTestCase { @Override @@ -109,10 +110,6 @@ private static Map randomAliases() { return Collections.singletonMap(aliasName, aliasMeta); } - private static DataStreamLifecycle randomLifecycle() { - return DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); - } - private static CompressedXContent randomMappings(ComposableIndexTemplate.DataStreamTemplate dataStreamTemplate) { try { if (dataStreamTemplate != null) { @@ -212,7 +209,7 @@ public void testComponentTemplatesEquals() { assertThat(ComposableIndexTemplate.componentTemplatesEquals(List.of(), List.of(randomAlphaOfLength(5))), equalTo(false)); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { Settings settings = null; CompressedXContent mappings = null; Map aliases = null; @@ -226,7 +223,8 @@ public void testXContentSerializationWithRollover() throws IOException { if (randomBoolean()) { aliases = randomAliases(); } - DataStreamLifecycle lifecycle = randomLifecycle(); + // We use the empty lifecycle so the global retention can be in effect + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); Template template = new Template(settings, mappings, aliases, lifecycle); ComposableIndexTemplate.builder() .indexPatterns(List.of(randomAlphaOfLength(4))) @@ -240,14 +238,19 @@ public void testXContentSerializationWithRollover() throws IOException { try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration); + DataStreamGlobalRetention globalRetention = 
DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); - for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention()) + for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) .getConditions() .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java index 491ba868dfd9b..5cd104f1f59b5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java @@ -75,9 +75,10 @@ protected ClusterState.Custom mutateInstance(ClusterState.Custom instance) { } public static DataStreamGlobalRetention randomGlobalRetention() { + boolean withDefault = randomBoolean(); return new DataStreamGlobalRetention( - randomBoolean() ? null : TimeValue.timeValueDays(randomIntBetween(1, 1000)), - randomBoolean() ? null : TimeValue.timeValueDays(randomIntBetween(1000, 2000)) + withDefault == false ? null : TimeValue.timeValueDays(randomIntBetween(1, 1000)), + withDefault && randomBoolean() ? 
null : TimeValue.timeValueDays(randomIntBetween(1000, 2000)) ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java index b266addc37407..d389131e6b294 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java @@ -39,6 +39,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.RetentionSource.MAX_GLOBAL_RETENTION; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class DataStreamLifecycleTests extends AbstractXContentSerializingTestCase { @@ -106,13 +107,14 @@ protected DataStreamLifecycle doParseInstance(XContentParser parser) throws IOEx return DataStreamLifecycle.fromXContent(parser); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { DataStreamLifecycle lifecycle = createTestInstance(); try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); - lifecycle.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration, globalRetention); + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + lifecycle.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, 
containsString("rollover")); for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) @@ -124,6 +126,13 @@ public void testXContentSerializationWithRollover() throws IOException { if (rolloverConfiguration.getAutomaticConditions().isEmpty() == false) { assertThat(serialized, containsString("[automatic]")); } + // We check that even if there was no retention provided by the user, the global retention applies + if (lifecycle.getDataRetention() == null) { + assertThat(serialized, not(containsString("data_retention"))); + } else { + assertThat(serialized, containsString("data_retention")); + } + assertThat(serialized, containsString("effective_retention")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 3e758df17c432..a1a523ddb584d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -1660,7 +1660,7 @@ private DataStream createDataStream( return newInstance(dataStreamName, backingIndices, backingIndicesCount, null, false, lifecycle); } - public void testXContentSerializationWithRollover() throws IOException { + public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException { String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); List indices = randomIndexInstances(); long generation = indices.size() + ESTestCase.randomLongBetween(1, 128); @@ -1675,7 +1675,7 @@ public void testXContentSerializationWithRollover() throws IOException { failureIndices = randomNonEmptyIndexInstances(); } - DataStreamLifecycle lifecycle = DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build(); + DataStreamLifecycle lifecycle = new DataStreamLifecycle(); DataStream dataStream = new DataStream( 
dataStreamName, indices, @@ -1698,7 +1698,9 @@ public void testXContentSerializationWithRollover() throws IOException { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); - dataStream.toXContent(builder, ToXContent.EMPTY_PARAMS, rolloverConfiguration, globalRetention); + + ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); + dataStream.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); assertThat(serialized, containsString("rollover")); for (String label : rolloverConfiguration.resolveRolloverConditions(lifecycle.getEffectiveDataRetention(globalRetention)) @@ -1706,6 +1708,9 @@ public void testXContentSerializationWithRollover() throws IOException { .keySet()) { assertThat(serialized, containsString(label)); } + // We check that even if there was no retention provided by the user, the global retention applies + assertThat(serialized, not(containsString("data_retention"))); + assertThat(serialized, containsString("effective_retention")); } } From a1305373f2d8391387ba2138153f3612c7ce7f18 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 20 Mar 2024 16:32:12 -0400 Subject: [PATCH 075/214] ESQL: Use generated docs for abs and acos (#106510) ESQL: Use generated docs for abs and acos --- docs/reference/esql/functions/abs.asciidoc | 39 ------------------- docs/reference/esql/functions/acos.asciidoc | 31 --------------- .../esql/functions/description/acos.asciidoc | 2 +- .../esql/functions/description/left.asciidoc | 2 +- .../esql/functions/description/log.asciidoc | 2 +- .../description/st_intersects.asciidoc | 5 +++ .../esql/functions/examples/abs.asciidoc | 21 ++++++++++ .../esql/functions/examples/acos.asciidoc | 13 +++++++ 
.../esql/functions/layout/abs.asciidoc | 1 + .../esql/functions/layout/acos.asciidoc | 1 + .../functions/layout/st_intersects.asciidoc | 14 +++++++ .../esql/functions/math-functions.asciidoc | 4 +- .../esql/functions/parameters/abs.asciidoc | 4 +- .../esql/functions/parameters/acos.asciidoc | 4 +- .../esql/functions/parameters/asin.asciidoc | 2 +- .../esql/functions/parameters/atan.asciidoc | 2 +- .../esql/functions/parameters/atan2.asciidoc | 4 +- .../esql/functions/parameters/case.asciidoc | 2 +- .../esql/functions/parameters/ceil.asciidoc | 2 +- .../functions/parameters/coalesce.asciidoc | 4 +- .../esql/functions/parameters/concat.asciidoc | 4 +- .../esql/functions/parameters/cos.asciidoc | 2 +- .../esql/functions/parameters/cosh.asciidoc | 2 +- .../parameters/date_extract.asciidoc | 4 +- .../esql/functions/parameters/floor.asciidoc | 2 +- .../esql/functions/parameters/left.asciidoc | 2 +- .../esql/functions/parameters/length.asciidoc | 2 +- .../esql/functions/parameters/log.asciidoc | 2 +- .../esql/functions/parameters/log10.asciidoc | 2 +- .../esql/functions/parameters/ltrim.asciidoc | 2 +- .../esql/functions/parameters/mv_avg.asciidoc | 2 +- .../functions/parameters/mv_concat.asciidoc | 2 +- .../functions/parameters/mv_count.asciidoc | 2 +- .../functions/parameters/mv_dedupe.asciidoc | 2 +- .../functions/parameters/mv_first.asciidoc | 2 +- .../functions/parameters/mv_last.asciidoc | 2 +- .../esql/functions/parameters/mv_max.asciidoc | 2 +- .../functions/parameters/mv_median.asciidoc | 2 +- .../esql/functions/parameters/mv_min.asciidoc | 2 +- .../functions/parameters/mv_slice.asciidoc | 2 +- .../esql/functions/parameters/mv_sum.asciidoc | 2 +- .../esql/functions/parameters/mv_zip.asciidoc | 4 +- .../functions/parameters/replace.asciidoc | 4 +- .../esql/functions/parameters/right.asciidoc | 2 +- .../esql/functions/parameters/round.asciidoc | 2 +- .../esql/functions/parameters/rtrim.asciidoc | 2 +- .../esql/functions/parameters/sin.asciidoc | 2 +- 
.../esql/functions/parameters/sinh.asciidoc | 2 +- .../esql/functions/parameters/split.asciidoc | 2 +- .../esql/functions/parameters/sqrt.asciidoc | 2 +- .../parameters/st_intersects.asciidoc | 7 ++++ .../functions/parameters/substring.asciidoc | 2 +- .../esql/functions/parameters/tan.asciidoc | 2 +- .../esql/functions/parameters/tanh.asciidoc | 2 +- .../functions/parameters/to_boolean.asciidoc | 2 +- .../parameters/to_cartesianpoint.asciidoc | 2 +- .../parameters/to_cartesianshape.asciidoc | 2 +- .../functions/parameters/to_datetime.asciidoc | 2 +- .../functions/parameters/to_degrees.asciidoc | 2 +- .../functions/parameters/to_double.asciidoc | 2 +- .../functions/parameters/to_geopoint.asciidoc | 2 +- .../functions/parameters/to_geoshape.asciidoc | 2 +- .../functions/parameters/to_integer.asciidoc | 2 +- .../esql/functions/parameters/to_ip.asciidoc | 2 +- .../functions/parameters/to_long.asciidoc | 2 +- .../functions/parameters/to_radians.asciidoc | 2 +- .../functions/parameters/to_string.asciidoc | 2 +- .../parameters/to_unsigned_long.asciidoc | 2 +- .../functions/parameters/to_version.asciidoc | 2 +- .../esql/functions/parameters/trim.asciidoc | 2 +- .../esql/functions/signature/abs.svg | 2 +- .../esql/functions/signature/acos.svg | 2 +- .../esql/functions/signature/asin.svg | 2 +- .../esql/functions/signature/atan.svg | 2 +- .../esql/functions/signature/atan2.svg | 2 +- .../esql/functions/signature/ceil.svg | 2 +- .../esql/functions/signature/coalesce.svg | 2 +- .../esql/functions/signature/cos.svg | 2 +- .../esql/functions/signature/cosh.svg | 2 +- .../esql/functions/signature/date_extract.svg | 2 +- .../esql/functions/signature/floor.svg | 2 +- .../esql/functions/signature/left.svg | 2 +- .../esql/functions/signature/length.svg | 2 +- .../esql/functions/signature/log.svg | 2 +- .../esql/functions/signature/log10.svg | 2 +- .../esql/functions/signature/ltrim.svg | 2 +- .../esql/functions/signature/mv_avg.svg | 2 +- .../esql/functions/signature/mv_concat.svg | 
2 +- .../esql/functions/signature/mv_count.svg | 2 +- .../esql/functions/signature/mv_dedupe.svg | 2 +- .../esql/functions/signature/mv_first.svg | 2 +- .../esql/functions/signature/mv_last.svg | 2 +- .../esql/functions/signature/mv_max.svg | 2 +- .../esql/functions/signature/mv_median.svg | 2 +- .../esql/functions/signature/mv_min.svg | 2 +- .../esql/functions/signature/mv_slice.svg | 2 +- .../esql/functions/signature/mv_sum.svg | 2 +- .../esql/functions/signature/mv_zip.svg | 2 +- .../esql/functions/signature/replace.svg | 2 +- .../esql/functions/signature/right.svg | 2 +- .../esql/functions/signature/round.svg | 2 +- .../esql/functions/signature/rtrim.svg | 2 +- .../esql/functions/signature/sin.svg | 2 +- .../esql/functions/signature/sinh.svg | 2 +- .../esql/functions/signature/split.svg | 2 +- .../esql/functions/signature/sqrt.svg | 2 +- .../esql/functions/signature/substring.svg | 2 +- .../esql/functions/signature/tan.svg | 2 +- .../esql/functions/signature/tanh.svg | 2 +- .../esql/functions/signature/to_boolean.svg | 2 +- .../functions/signature/to_cartesianpoint.svg | 2 +- .../functions/signature/to_cartesianshape.svg | 2 +- .../esql/functions/signature/to_datetime.svg | 2 +- .../esql/functions/signature/to_degrees.svg | 2 +- .../esql/functions/signature/to_double.svg | 2 +- .../esql/functions/signature/to_geopoint.svg | 2 +- .../esql/functions/signature/to_geoshape.svg | 2 +- .../esql/functions/signature/to_integer.svg | 2 +- .../esql/functions/signature/to_ip.svg | 2 +- .../esql/functions/signature/to_long.svg | 2 +- .../esql/functions/signature/to_radians.svg | 2 +- .../esql/functions/signature/to_string.svg | 2 +- .../functions/signature/to_unsigned_long.svg | 2 +- .../esql/functions/signature/to_version.svg | 2 +- .../esql/functions/signature/trim.svg | 2 +- .../esql/functions/types/abs.asciidoc | 2 +- .../esql/functions/types/acos.asciidoc | 2 +- .../esql/functions/types/asin.asciidoc | 2 +- .../esql/functions/types/atan.asciidoc | 2 +- 
.../esql/functions/types/atan2.asciidoc | 2 +- .../esql/functions/types/case.asciidoc | 2 +- .../esql/functions/types/ceil.asciidoc | 2 +- .../esql/functions/types/coalesce.asciidoc | 2 +- .../esql/functions/types/cos.asciidoc | 2 +- .../esql/functions/types/cosh.asciidoc | 2 +- .../functions/types/date_extract.asciidoc | 2 +- .../esql/functions/types/floor.asciidoc | 2 +- .../esql/functions/types/left.asciidoc | 2 +- .../esql/functions/types/length.asciidoc | 2 +- .../esql/functions/types/log.asciidoc | 2 +- .../esql/functions/types/log10.asciidoc | 2 +- .../esql/functions/types/ltrim.asciidoc | 2 +- .../esql/functions/types/mv_avg.asciidoc | 2 +- .../esql/functions/types/mv_concat.asciidoc | 2 +- .../esql/functions/types/mv_count.asciidoc | 2 +- .../esql/functions/types/mv_dedupe.asciidoc | 2 +- .../esql/functions/types/mv_first.asciidoc | 2 +- .../esql/functions/types/mv_last.asciidoc | 2 +- .../esql/functions/types/mv_max.asciidoc | 2 +- .../esql/functions/types/mv_median.asciidoc | 2 +- .../esql/functions/types/mv_min.asciidoc | 2 +- .../esql/functions/types/mv_slice.asciidoc | 2 +- .../esql/functions/types/mv_sum.asciidoc | 2 +- .../esql/functions/types/mv_zip.asciidoc | 2 +- .../esql/functions/types/replace.asciidoc | 2 +- .../esql/functions/types/right.asciidoc | 2 +- .../esql/functions/types/round.asciidoc | 2 +- .../esql/functions/types/rtrim.asciidoc | 2 +- .../esql/functions/types/sin.asciidoc | 2 +- .../esql/functions/types/sinh.asciidoc | 2 +- .../esql/functions/types/split.asciidoc | 2 +- .../esql/functions/types/sqrt.asciidoc | 2 +- .../functions/types/st_intersects.asciidoc | 4 ++ .../esql/functions/types/substring.asciidoc | 2 +- .../esql/functions/types/tan.asciidoc | 2 +- .../esql/functions/types/tanh.asciidoc | 2 +- .../esql/functions/types/to_boolean.asciidoc | 2 +- .../types/to_cartesianpoint.asciidoc | 2 +- .../types/to_cartesianshape.asciidoc | 2 +- .../esql/functions/types/to_datetime.asciidoc | 2 +- 
.../esql/functions/types/to_degrees.asciidoc | 2 +- .../esql/functions/types/to_double.asciidoc | 2 +- .../esql/functions/types/to_geopoint.asciidoc | 2 +- .../esql/functions/types/to_geoshape.asciidoc | 2 +- .../esql/functions/types/to_integer.asciidoc | 2 +- .../esql/functions/types/to_ip.asciidoc | 2 +- .../esql/functions/types/to_long.asciidoc | 2 +- .../esql/functions/types/to_radians.asciidoc | 2 +- .../esql/functions/types/to_string.asciidoc | 2 +- .../functions/types/to_unsigned_long.asciidoc | 2 +- .../esql/functions/types/to_version.asciidoc | 2 +- .../esql/functions/types/trim.asciidoc | 2 +- .../src/main/resources/math.csv-spec | 20 +++++----- .../src/main/resources/meta.csv-spec | 4 +- .../expression/function/scalar/math/Abs.java | 16 +++++++- .../expression/function/scalar/math/Acos.java | 9 ++++- 186 files changed, 280 insertions(+), 267 deletions(-) delete mode 100644 docs/reference/esql/functions/abs.asciidoc delete mode 100644 docs/reference/esql/functions/acos.asciidoc create mode 100644 docs/reference/esql/functions/description/st_intersects.asciidoc create mode 100644 docs/reference/esql/functions/examples/abs.asciidoc create mode 100644 docs/reference/esql/functions/examples/acos.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_intersects.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_intersects.asciidoc diff --git a/docs/reference/esql/functions/abs.asciidoc b/docs/reference/esql/functions/abs.asciidoc deleted file mode 100644 index 4913d3219f0ee..0000000000000 --- a/docs/reference/esql/functions/abs.asciidoc +++ /dev/null @@ -1,39 +0,0 @@ -[discrete] -[[esql-abs]] -=== `ABS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/abs.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the absolute value. 
- -include::types/abs.asciidoc[] - -*Examples* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=docsAbs] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=docsAbs-result] -|=== - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees-result] -|=== diff --git a/docs/reference/esql/functions/acos.asciidoc b/docs/reference/esql/functions/acos.asciidoc deleted file mode 100644 index 9be03f830bbd7..0000000000000 --- a/docs/reference/esql/functions/acos.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-acos]] -=== `ACOS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/acos.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an -angle, expressed in radians. - -include::types/acos.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=acos] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=acos-result] -|=== diff --git a/docs/reference/esql/functions/description/acos.asciidoc b/docs/reference/esql/functions/description/acos.asciidoc index 8b539c7b377c9..a393c369d9429 100644 --- a/docs/reference/esql/functions/description/acos.asciidoc +++ b/docs/reference/esql/functions/description/acos.asciidoc @@ -2,4 +2,4 @@ *Description* -The arccosine of an angle, expressed in radians. +Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. 
diff --git a/docs/reference/esql/functions/description/left.asciidoc b/docs/reference/esql/functions/description/left.asciidoc index 0c06cdf5cb5e9..fdb76e9ef3acd 100644 --- a/docs/reference/esql/functions/description/left.asciidoc +++ b/docs/reference/esql/functions/description/left.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the substring that extracts 'length' chars from 'str' starting from the left. +Returns the substring that extracts 'length' chars from 'string' starting from the left. diff --git a/docs/reference/esql/functions/description/log.asciidoc b/docs/reference/esql/functions/description/log.asciidoc index 85184be160fec..97a79cbdaee54 100644 --- a/docs/reference/esql/functions/description/log.asciidoc +++ b/docs/reference/esql/functions/description/log.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the logarithm of a value to a base. +Returns the logarithm of a number to a base. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc new file mode 100644 index 0000000000000..f22a559baad3a --- /dev/null +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Description* + +Returns whether the two geometries or geometry columns intersect. diff --git a/docs/reference/esql/functions/examples/abs.asciidoc b/docs/reference/esql/functions/examples/abs.asciidoc new file mode 100644 index 0000000000000..6e2ce86e8b428 --- /dev/null +++ b/docs/reference/esql/functions/examples/abs.asciidoc @@ -0,0 +1,21 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=abs] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=abs-result] +|=== +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=abs-employees] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=abs-employees-result] +|=== + diff --git a/docs/reference/esql/functions/examples/acos.asciidoc b/docs/reference/esql/functions/examples/acos.asciidoc new file mode 100644 index 0000000000000..947262b2f3ff7 --- /dev/null +++ b/docs/reference/esql/functions/examples/acos.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=acos] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=acos-result] +|=== + diff --git a/docs/reference/esql/functions/layout/abs.asciidoc b/docs/reference/esql/functions/layout/abs.asciidoc index 672fe555e276b..903266ae54342 100644 --- a/docs/reference/esql/functions/layout/abs.asciidoc +++ b/docs/reference/esql/functions/layout/abs.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/abs.svg[Embedded,opts=inline] include::../parameters/abs.asciidoc[] include::../description/abs.asciidoc[] include::../types/abs.asciidoc[] +include::../examples/abs.asciidoc[] diff --git a/docs/reference/esql/functions/layout/acos.asciidoc b/docs/reference/esql/functions/layout/acos.asciidoc index 427223c6f9bca..66e540dc766d6 100644 --- a/docs/reference/esql/functions/layout/acos.asciidoc +++ b/docs/reference/esql/functions/layout/acos.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/acos.svg[Embedded,opts=inline] include::../parameters/acos.asciidoc[] include::../description/acos.asciidoc[] include::../types/acos.asciidoc[] 
+include::../examples/acos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_intersects.asciidoc b/docs/reference/esql/functions/layout/st_intersects.asciidoc new file mode 100644 index 0000000000000..e62171330abfc --- /dev/null +++ b/docs/reference/esql/functions/layout/st_intersects.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. + +[discrete] +[[esql-st_intersects]] +=== `ST_INTERSECTS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] + +include::../parameters/st_intersects.asciidoc[] +include::../description/st_intersects.asciidoc[] +include::../types/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 0ddf7412db2a1..5891a4f9a8af7 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -31,8 +31,8 @@ * <> // end::math_list[] -include::abs.asciidoc[] -include::acos.asciidoc[] +include::layout/abs.asciidoc[] +include::layout/acos.asciidoc[] include::asin.asciidoc[] include::atan.asciidoc[] include::atan2.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/abs.asciidoc b/docs/reference/esql/functions/parameters/abs.asciidoc index 5e41a08473a4e..8527c7f74bb09 100644 --- a/docs/reference/esql/functions/parameters/abs.asciidoc +++ b/docs/reference/esql/functions/parameters/abs.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: - +`number`:: +Numeric expression. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/parameters/acos.asciidoc b/docs/reference/esql/functions/parameters/acos.asciidoc index 02089d079f6e5..2d06f7e70333d 100644 --- a/docs/reference/esql/functions/parameters/acos.asciidoc +++ b/docs/reference/esql/functions/parameters/acos.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: -Number between -1 and 1 +`number`:: +Number between -1 and 1. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc index 02089d079f6e5..df4830d9e1cc3 100644 --- a/docs/reference/esql/functions/parameters/asin.asciidoc +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: Number between -1 and 1 diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc index c76d5ab2b5d98..087f73688a093 100644 --- a/docs/reference/esql/functions/parameters/atan.asciidoc +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: A number diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc index ab5921fe2482a..ab5268fdab612 100644 --- a/docs/reference/esql/functions/parameters/atan2.asciidoc +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -1,7 +1,7 @@ *Parameters* -`y`:: +`y_coordinate`:: y coordinate -`x`:: +`x_coordinate`:: x coordinate diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index 8c24eef308860..fb70278c17d1a 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -3,5 +3,5 @@ `condition`:: -`rest`:: +`trueValue`:: diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc 
b/docs/reference/esql/functions/parameters/ceil.asciidoc index 5e41a08473a4e..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/ceil.asciidoc +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 3a3a8ac917984..07c8a84ed5583 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -1,7 +1,7 @@ *Parameters* -`expression`:: +`first`:: Expression to evaluate -`expressionX`:: +`rest`:: Other expression to evaluate diff --git a/docs/reference/esql/functions/parameters/concat.asciidoc b/docs/reference/esql/functions/parameters/concat.asciidoc index 55c75eae0de74..47a555fbe80c6 100644 --- a/docs/reference/esql/functions/parameters/concat.asciidoc +++ b/docs/reference/esql/functions/parameters/concat.asciidoc @@ -1,7 +1,7 @@ *Parameters* -`first`:: +`string1`:: -`rest`:: +`string2`:: diff --git a/docs/reference/esql/functions/parameters/cos.asciidoc b/docs/reference/esql/functions/parameters/cos.asciidoc index 6c943569ecd04..eceab83443236 100644 --- a/docs/reference/esql/functions/parameters/cos.asciidoc +++ b/docs/reference/esql/functions/parameters/cos.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: An angle, in radians diff --git a/docs/reference/esql/functions/parameters/cosh.asciidoc b/docs/reference/esql/functions/parameters/cosh.asciidoc index 65d9408644a82..1535b0feb8424 100644 --- a/docs/reference/esql/functions/parameters/cosh.asciidoc +++ b/docs/reference/esql/functions/parameters/cosh.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: The number who's hyperbolic cosine is to be returned diff --git a/docs/reference/esql/functions/parameters/date_extract.asciidoc b/docs/reference/esql/functions/parameters/date_extract.asciidoc index 
7a2c2c4dcc9af..170bc40d89ef6 100644 --- a/docs/reference/esql/functions/parameters/date_extract.asciidoc +++ b/docs/reference/esql/functions/parameters/date_extract.asciidoc @@ -1,7 +1,7 @@ *Parameters* -`date_part`:: +`datePart`:: Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era. -`field`:: +`date`:: Date expression diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc index 5e41a08473a4e..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/floor.asciidoc +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/left.asciidoc b/docs/reference/esql/functions/parameters/left.asciidoc index df95257f4160a..b296adfc064be 100644 --- a/docs/reference/esql/functions/parameters/left.asciidoc +++ b/docs/reference/esql/functions/parameters/left.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`str`:: +`string`:: The string from which to return a substring. 
`length`:: diff --git a/docs/reference/esql/functions/parameters/length.asciidoc b/docs/reference/esql/functions/parameters/length.asciidoc index 4fb63948eceaa..4c3a25283c403 100644 --- a/docs/reference/esql/functions/parameters/length.asciidoc +++ b/docs/reference/esql/functions/parameters/length.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`str`:: +`string`:: diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc index 25292ef14d7da..1d2306c5b215b 100644 --- a/docs/reference/esql/functions/parameters/log.asciidoc +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -3,5 +3,5 @@ `base`:: -`value`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc index 5e41a08473a4e..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/log10.asciidoc +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/ltrim.asciidoc b/docs/reference/esql/functions/parameters/ltrim.asciidoc index 4fb63948eceaa..4c3a25283c403 100644 --- a/docs/reference/esql/functions/parameters/ltrim.asciidoc +++ b/docs/reference/esql/functions/parameters/ltrim.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`str`:: +`string`:: diff --git a/docs/reference/esql/functions/parameters/mv_avg.asciidoc b/docs/reference/esql/functions/parameters/mv_avg.asciidoc index 56df4f5138a27..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/mv_avg.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_avg.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`field`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/mv_concat.asciidoc b/docs/reference/esql/functions/parameters/mv_concat.asciidoc index c4b846f766d8c..88893478e2b74 100644 --- a/docs/reference/esql/functions/parameters/mv_concat.asciidoc +++ 
b/docs/reference/esql/functions/parameters/mv_concat.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`v`:: +`string`:: values to join `delim`:: diff --git a/docs/reference/esql/functions/parameters/mv_count.asciidoc b/docs/reference/esql/functions/parameters/mv_count.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_count.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_count.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_first.asciidoc b/docs/reference/esql/functions/parameters/mv_first.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_first.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_first.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_last.asciidoc b/docs/reference/esql/functions/parameters/mv_last.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_last.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_last.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_max.asciidoc b/docs/reference/esql/functions/parameters/mv_max.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_max.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_max.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_median.asciidoc 
b/docs/reference/esql/functions/parameters/mv_median.asciidoc index 915b46e872870..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/mv_median.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_median.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/mv_min.asciidoc b/docs/reference/esql/functions/parameters/mv_min.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/mv_min.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_min.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/mv_slice.asciidoc b/docs/reference/esql/functions/parameters/mv_slice.asciidoc index 83faf7c299fb2..cffbfaff95e86 100644 --- a/docs/reference/esql/functions/parameters/mv_slice.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_slice.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`v`:: +`field`:: A multivalued field `start`:: diff --git a/docs/reference/esql/functions/parameters/mv_sum.asciidoc b/docs/reference/esql/functions/parameters/mv_sum.asciidoc index 915b46e872870..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/mv_sum.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_sum.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/mv_zip.asciidoc b/docs/reference/esql/functions/parameters/mv_zip.asciidoc index 592f119795cd9..09ab5969fe66a 100644 --- a/docs/reference/esql/functions/parameters/mv_zip.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_zip.asciidoc @@ -1,9 +1,9 @@ *Parameters* -`mvLeft`:: +`string1`:: A multivalued field -`mvRight`:: +`string2`:: A multivalued field `delim`:: diff --git a/docs/reference/esql/functions/parameters/replace.asciidoc b/docs/reference/esql/functions/parameters/replace.asciidoc index 6180447b7067e..f8831e5a6b8c5 100644 --- 
a/docs/reference/esql/functions/parameters/replace.asciidoc +++ b/docs/reference/esql/functions/parameters/replace.asciidoc @@ -1,10 +1,10 @@ *Parameters* -`str`:: +`string`:: `regex`:: -`newStr`:: +`newString`:: diff --git a/docs/reference/esql/functions/parameters/right.asciidoc b/docs/reference/esql/functions/parameters/right.asciidoc index 34d92a327e958..3ddd7e7c8cd68 100644 --- a/docs/reference/esql/functions/parameters/right.asciidoc +++ b/docs/reference/esql/functions/parameters/right.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`str`:: +`string`:: `length`:: diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc index a6fe2eb73eba6..ef53d9e07eb00 100644 --- a/docs/reference/esql/functions/parameters/round.asciidoc +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`value`:: +`number`:: The numeric value to round `decimals`:: diff --git a/docs/reference/esql/functions/parameters/rtrim.asciidoc b/docs/reference/esql/functions/parameters/rtrim.asciidoc index 4fb63948eceaa..4c3a25283c403 100644 --- a/docs/reference/esql/functions/parameters/rtrim.asciidoc +++ b/docs/reference/esql/functions/parameters/rtrim.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`str`:: +`string`:: diff --git a/docs/reference/esql/functions/parameters/sin.asciidoc b/docs/reference/esql/functions/parameters/sin.asciidoc index 6c943569ecd04..eceab83443236 100644 --- a/docs/reference/esql/functions/parameters/sin.asciidoc +++ b/docs/reference/esql/functions/parameters/sin.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: An angle, in radians diff --git a/docs/reference/esql/functions/parameters/sinh.asciidoc b/docs/reference/esql/functions/parameters/sinh.asciidoc index b94854e0bcab6..d2dfa9701ff89 100644 --- a/docs/reference/esql/functions/parameters/sinh.asciidoc +++ b/docs/reference/esql/functions/parameters/sinh.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: The number to 
return the hyperbolic sine of diff --git a/docs/reference/esql/functions/parameters/split.asciidoc b/docs/reference/esql/functions/parameters/split.asciidoc index a6b9e26af34b1..7b3c24adae928 100644 --- a/docs/reference/esql/functions/parameters/split.asciidoc +++ b/docs/reference/esql/functions/parameters/split.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`str`:: +`string`:: `delim`:: diff --git a/docs/reference/esql/functions/parameters/sqrt.asciidoc b/docs/reference/esql/functions/parameters/sqrt.asciidoc index 5e41a08473a4e..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/sqrt.asciidoc +++ b/docs/reference/esql/functions/parameters/sqrt.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/parameters/substring.asciidoc b/docs/reference/esql/functions/parameters/substring.asciidoc index 1a689b56cff71..19c4e5551185a 100644 --- a/docs/reference/esql/functions/parameters/substring.asciidoc +++ b/docs/reference/esql/functions/parameters/substring.asciidoc @@ -1,6 +1,6 @@ *Parameters* -`str`:: +`string`:: `start`:: diff --git a/docs/reference/esql/functions/parameters/tan.asciidoc b/docs/reference/esql/functions/parameters/tan.asciidoc index 6c943569ecd04..eceab83443236 100644 --- a/docs/reference/esql/functions/parameters/tan.asciidoc +++ b/docs/reference/esql/functions/parameters/tan.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: An angle, in radians diff --git a/docs/reference/esql/functions/parameters/tanh.asciidoc 
b/docs/reference/esql/functions/parameters/tanh.asciidoc index 7f78c35fd4cd2..1fc97c3b68f84 100644 --- a/docs/reference/esql/functions/parameters/tanh.asciidoc +++ b/docs/reference/esql/functions/parameters/tanh.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`n`:: +`number`:: The number to return the hyperbolic tangent of diff --git a/docs/reference/esql/functions/parameters/to_boolean.asciidoc b/docs/reference/esql/functions/parameters/to_boolean.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_boolean.asciidoc +++ b/docs/reference/esql/functions/parameters/to_boolean.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_datetime.asciidoc b/docs/reference/esql/functions/parameters/to_datetime.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_datetime.asciidoc +++ b/docs/reference/esql/functions/parameters/to_datetime.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_degrees.asciidoc b/docs/reference/esql/functions/parameters/to_degrees.asciidoc index 915b46e872870..9faa6c1adebe2 100644 --- 
a/docs/reference/esql/functions/parameters/to_degrees.asciidoc +++ b/docs/reference/esql/functions/parameters/to_degrees.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/to_double.asciidoc b/docs/reference/esql/functions/parameters/to_double.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_double.asciidoc +++ b/docs/reference/esql/functions/parameters/to_double.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_integer.asciidoc b/docs/reference/esql/functions/parameters/to_integer.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_integer.asciidoc +++ b/docs/reference/esql/functions/parameters/to_integer.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_ip.asciidoc b/docs/reference/esql/functions/parameters/to_ip.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_ip.asciidoc +++ b/docs/reference/esql/functions/parameters/to_ip.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git 
a/docs/reference/esql/functions/parameters/to_long.asciidoc b/docs/reference/esql/functions/parameters/to_long.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_long.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_radians.asciidoc b/docs/reference/esql/functions/parameters/to_radians.asciidoc index 915b46e872870..9faa6c1adebe2 100644 --- a/docs/reference/esql/functions/parameters/to_radians.asciidoc +++ b/docs/reference/esql/functions/parameters/to_radians.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`number`:: diff --git a/docs/reference/esql/functions/parameters/to_string.asciidoc b/docs/reference/esql/functions/parameters/to_string.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_string.asciidoc +++ b/docs/reference/esql/functions/parameters/to_string.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/to_version.asciidoc b/docs/reference/esql/functions/parameters/to_version.asciidoc index 915b46e872870..56df4f5138a27 100644 --- a/docs/reference/esql/functions/parameters/to_version.asciidoc +++ b/docs/reference/esql/functions/parameters/to_version.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`v`:: +`field`:: diff --git a/docs/reference/esql/functions/parameters/trim.asciidoc b/docs/reference/esql/functions/parameters/trim.asciidoc index 4fb63948eceaa..4c3a25283c403 100644 --- 
a/docs/reference/esql/functions/parameters/trim.asciidoc +++ b/docs/reference/esql/functions/parameters/trim.asciidoc @@ -1,4 +1,4 @@ *Parameters* -`str`:: +`string`:: diff --git a/docs/reference/esql/functions/signature/abs.svg b/docs/reference/esql/functions/signature/abs.svg index 0b7aac99357ea..0ca58b790aeab 100644 --- a/docs/reference/esql/functions/signature/abs.svg +++ b/docs/reference/esql/functions/signature/abs.svg @@ -1 +1 @@ -ABS(n) \ No newline at end of file +ABS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/acos.svg b/docs/reference/esql/functions/signature/acos.svg index 6a2e2c04cd20e..837a3aa00258b 100644 --- a/docs/reference/esql/functions/signature/acos.svg +++ b/docs/reference/esql/functions/signature/acos.svg @@ -1 +1 @@ -ACOS(n) \ No newline at end of file +ACOS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/asin.svg b/docs/reference/esql/functions/signature/asin.svg index 9792e7316b138..85bff67ad7a96 100644 --- a/docs/reference/esql/functions/signature/asin.svg +++ b/docs/reference/esql/functions/signature/asin.svg @@ -1 +1 @@ -ASIN(n) \ No newline at end of file +ASIN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan.svg b/docs/reference/esql/functions/signature/atan.svg index 184956ff2e126..e741718b6dbd8 100644 --- a/docs/reference/esql/functions/signature/atan.svg +++ b/docs/reference/esql/functions/signature/atan.svg @@ -1 +1 @@ -ATAN(n) \ No newline at end of file +ATAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/atan2.svg b/docs/reference/esql/functions/signature/atan2.svg index f2295d3d98f16..d54fcf2770c3c 100644 --- a/docs/reference/esql/functions/signature/atan2.svg +++ b/docs/reference/esql/functions/signature/atan2.svg @@ -1 +1 @@ -ATAN2(y,x) \ No newline at end of file +ATAN2(y_coordinate,x_coordinate) \ No newline at end of file diff --git 
a/docs/reference/esql/functions/signature/ceil.svg b/docs/reference/esql/functions/signature/ceil.svg index bb07117e56630..51b1bbba6bcef 100644 --- a/docs/reference/esql/functions/signature/ceil.svg +++ b/docs/reference/esql/functions/signature/ceil.svg @@ -1 +1 @@ -CEIL(n) \ No newline at end of file +CEIL(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/coalesce.svg b/docs/reference/esql/functions/signature/coalesce.svg index 22a70efead49c..4c4119582b223 100644 --- a/docs/reference/esql/functions/signature/coalesce.svg +++ b/docs/reference/esql/functions/signature/coalesce.svg @@ -1 +1 @@ -COALESCE(expression,expressionX) \ No newline at end of file +COALESCE(first,rest) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cos.svg b/docs/reference/esql/functions/signature/cos.svg index f06a24726f71a..ff0484a362aef 100644 --- a/docs/reference/esql/functions/signature/cos.svg +++ b/docs/reference/esql/functions/signature/cos.svg @@ -1 +1 @@ -COS(n) \ No newline at end of file +COS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cosh.svg b/docs/reference/esql/functions/signature/cosh.svg index 54ea9bff84097..9b9eddd3cb808 100644 --- a/docs/reference/esql/functions/signature/cosh.svg +++ b/docs/reference/esql/functions/signature/cosh.svg @@ -1 +1 @@ -COSH(n) \ No newline at end of file +COSH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/date_extract.svg b/docs/reference/esql/functions/signature/date_extract.svg index 397cdd400d88c..9aa7d4f4869c0 100644 --- a/docs/reference/esql/functions/signature/date_extract.svg +++ b/docs/reference/esql/functions/signature/date_extract.svg @@ -1 +1 @@ -DATE_EXTRACT(date_part,field) \ No newline at end of file +DATE_EXTRACT(datePart,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/floor.svg b/docs/reference/esql/functions/signature/floor.svg 
index 7e153548bfd82..06a7de70fce3e 100644 --- a/docs/reference/esql/functions/signature/floor.svg +++ b/docs/reference/esql/functions/signature/floor.svg @@ -1 +1 @@ -FLOOR(n) \ No newline at end of file +FLOOR(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/left.svg b/docs/reference/esql/functions/signature/left.svg index 75704982af004..ec14bf8c72131 100644 --- a/docs/reference/esql/functions/signature/left.svg +++ b/docs/reference/esql/functions/signature/left.svg @@ -1 +1 @@ -LEFT(str,length) \ No newline at end of file +LEFT(string,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/length.svg b/docs/reference/esql/functions/signature/length.svg index d199f1a9a0170..a1410895f7fdf 100644 --- a/docs/reference/esql/functions/signature/length.svg +++ b/docs/reference/esql/functions/signature/length.svg @@ -1 +1 @@ -LENGTH(str) \ No newline at end of file +LENGTH(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/log.svg b/docs/reference/esql/functions/signature/log.svg index 39a9a7e8dc52e..6b013ee52f498 100644 --- a/docs/reference/esql/functions/signature/log.svg +++ b/docs/reference/esql/functions/signature/log.svg @@ -1 +1 @@ -LOG(base,value) \ No newline at end of file +LOG(base,number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/log10.svg b/docs/reference/esql/functions/signature/log10.svg index 50edcf6ea943f..20522ccff4bea 100644 --- a/docs/reference/esql/functions/signature/log10.svg +++ b/docs/reference/esql/functions/signature/log10.svg @@ -1 +1 @@ -LOG10(n) \ No newline at end of file +LOG10(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/ltrim.svg b/docs/reference/esql/functions/signature/ltrim.svg index 327e75b92ca19..6fc4b0b0f8f47 100644 --- a/docs/reference/esql/functions/signature/ltrim.svg +++ b/docs/reference/esql/functions/signature/ltrim.svg @@ -1 +1 @@ 
-LTRIM(str) \ No newline at end of file +LTRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_avg.svg b/docs/reference/esql/functions/signature/mv_avg.svg index 4c2371eac0b44..c3d71a5f6d5d8 100644 --- a/docs/reference/esql/functions/signature/mv_avg.svg +++ b/docs/reference/esql/functions/signature/mv_avg.svg @@ -1 +1 @@ -MV_AVG(field) \ No newline at end of file +MV_AVG(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_concat.svg b/docs/reference/esql/functions/signature/mv_concat.svg index ec3a3aa4ae750..34dd6f98a9dce 100644 --- a/docs/reference/esql/functions/signature/mv_concat.svg +++ b/docs/reference/esql/functions/signature/mv_concat.svg @@ -1 +1 @@ -MV_CONCAT(v,delim) \ No newline at end of file +MV_CONCAT(string,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_count.svg b/docs/reference/esql/functions/signature/mv_count.svg index 48e60f26e394d..faac0140e5910 100644 --- a/docs/reference/esql/functions/signature/mv_count.svg +++ b/docs/reference/esql/functions/signature/mv_count.svg @@ -1 +1 @@ -MV_COUNT(v) \ No newline at end of file +MV_COUNT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_dedupe.svg b/docs/reference/esql/functions/signature/mv_dedupe.svg index 92be3210ce895..b84aa78051e44 100644 --- a/docs/reference/esql/functions/signature/mv_dedupe.svg +++ b/docs/reference/esql/functions/signature/mv_dedupe.svg @@ -1 +1 @@ -MV_DEDUPE(v) \ No newline at end of file +MV_DEDUPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_first.svg b/docs/reference/esql/functions/signature/mv_first.svg index 20d201eab0add..0f302227091d8 100644 --- a/docs/reference/esql/functions/signature/mv_first.svg +++ b/docs/reference/esql/functions/signature/mv_first.svg @@ -1 +1 @@ -MV_FIRST(v) \ No newline at end of file +MV_FIRST(field) \ No newline at end of file 
diff --git a/docs/reference/esql/functions/signature/mv_last.svg b/docs/reference/esql/functions/signature/mv_last.svg index eb32bb49f8ccc..3c60c008a9004 100644 --- a/docs/reference/esql/functions/signature/mv_last.svg +++ b/docs/reference/esql/functions/signature/mv_last.svg @@ -1 +1 @@ -MV_LAST(v) \ No newline at end of file +MV_LAST(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_max.svg b/docs/reference/esql/functions/signature/mv_max.svg index 6c64809be0720..18b0ef9847ae8 100644 --- a/docs/reference/esql/functions/signature/mv_max.svg +++ b/docs/reference/esql/functions/signature/mv_max.svg @@ -1 +1 @@ -MV_MAX(v) \ No newline at end of file +MV_MAX(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_median.svg b/docs/reference/esql/functions/signature/mv_median.svg index b287fde6dd97e..be0e34b5739e0 100644 --- a/docs/reference/esql/functions/signature/mv_median.svg +++ b/docs/reference/esql/functions/signature/mv_median.svg @@ -1 +1 @@ -MV_MEDIAN(v) \ No newline at end of file +MV_MEDIAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_min.svg b/docs/reference/esql/functions/signature/mv_min.svg index c6ef5e30c289c..c6f998283de4c 100644 --- a/docs/reference/esql/functions/signature/mv_min.svg +++ b/docs/reference/esql/functions/signature/mv_min.svg @@ -1 +1 @@ -MV_MIN(v) \ No newline at end of file +MV_MIN(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_slice.svg b/docs/reference/esql/functions/signature/mv_slice.svg index 277566a35e47d..c62cec6ceca4d 100644 --- a/docs/reference/esql/functions/signature/mv_slice.svg +++ b/docs/reference/esql/functions/signature/mv_slice.svg @@ -1 +1 @@ -MV_SLICE(v,start,end) \ No newline at end of file +MV_SLICE(field,start,end) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_sum.svg 
b/docs/reference/esql/functions/signature/mv_sum.svg index 3e3fbd30355b1..ca296b9f96711 100644 --- a/docs/reference/esql/functions/signature/mv_sum.svg +++ b/docs/reference/esql/functions/signature/mv_sum.svg @@ -1 +1 @@ -MV_SUM(v) \ No newline at end of file +MV_SUM(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_zip.svg b/docs/reference/esql/functions/signature/mv_zip.svg index 02c61b3c4bc5c..727d09ae7f47e 100644 --- a/docs/reference/esql/functions/signature/mv_zip.svg +++ b/docs/reference/esql/functions/signature/mv_zip.svg @@ -1 +1 @@ -MV_ZIP(mvLeft,mvRight,delim) \ No newline at end of file +MV_ZIP(string1,string2,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/replace.svg b/docs/reference/esql/functions/signature/replace.svg index bbcd11bcc0ab6..49fccf329732d 100644 --- a/docs/reference/esql/functions/signature/replace.svg +++ b/docs/reference/esql/functions/signature/replace.svg @@ -1 +1 @@ -REPLACE(str,regex,newStr) \ No newline at end of file +REPLACE(string,regex,newString) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/right.svg b/docs/reference/esql/functions/signature/right.svg index 969a6c9442479..0afa5dbf01f16 100644 --- a/docs/reference/esql/functions/signature/right.svg +++ b/docs/reference/esql/functions/signature/right.svg @@ -1 +1 @@ -RIGHT(str,length) \ No newline at end of file +RIGHT(string,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/round.svg b/docs/reference/esql/functions/signature/round.svg index 9da0b9d11213e..288b8ab2383e4 100644 --- a/docs/reference/esql/functions/signature/round.svg +++ b/docs/reference/esql/functions/signature/round.svg @@ -1 +1 @@ -ROUND(value,decimals) \ No newline at end of file +ROUND(number,decimals) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/rtrim.svg b/docs/reference/esql/functions/signature/rtrim.svg index 
b830bb59c5c31..1f2bfe6252a64 100644 --- a/docs/reference/esql/functions/signature/rtrim.svg +++ b/docs/reference/esql/functions/signature/rtrim.svg @@ -1 +1 @@ -RTRIM(str) \ No newline at end of file +RTRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sin.svg b/docs/reference/esql/functions/signature/sin.svg index eb20f3386d441..2c60f0580f8fb 100644 --- a/docs/reference/esql/functions/signature/sin.svg +++ b/docs/reference/esql/functions/signature/sin.svg @@ -1 +1 @@ -SIN(n) \ No newline at end of file +SIN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sinh.svg b/docs/reference/esql/functions/signature/sinh.svg index 30361aca1fb35..16e7ddb6b6534 100644 --- a/docs/reference/esql/functions/signature/sinh.svg +++ b/docs/reference/esql/functions/signature/sinh.svg @@ -1 +1 @@ -SINH(n) \ No newline at end of file +SINH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/split.svg b/docs/reference/esql/functions/signature/split.svg index 1213f6041b0c4..5d148a750fa8c 100644 --- a/docs/reference/esql/functions/signature/split.svg +++ b/docs/reference/esql/functions/signature/split.svg @@ -1 +1 @@ -SPLIT(str,delim) \ No newline at end of file +SPLIT(string,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sqrt.svg b/docs/reference/esql/functions/signature/sqrt.svg index 77c657120735f..8816e55429550 100644 --- a/docs/reference/esql/functions/signature/sqrt.svg +++ b/docs/reference/esql/functions/signature/sqrt.svg @@ -1 +1 @@ -SQRT(n) \ No newline at end of file +SQRT(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/substring.svg b/docs/reference/esql/functions/signature/substring.svg index 1f9f6e8c3afa0..41eb89fb2504a 100644 --- a/docs/reference/esql/functions/signature/substring.svg +++ b/docs/reference/esql/functions/signature/substring.svg @@ -1 +1 @@ 
-SUBSTRING(str,start,length) \ No newline at end of file +SUBSTRING(string,start,length) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tan.svg b/docs/reference/esql/functions/signature/tan.svg index 8ac6ee37cb52a..c8065b30586cc 100644 --- a/docs/reference/esql/functions/signature/tan.svg +++ b/docs/reference/esql/functions/signature/tan.svg @@ -1 +1 @@ -TAN(n) \ No newline at end of file +TAN(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tanh.svg b/docs/reference/esql/functions/signature/tanh.svg index dfe167afc5470..c2edfe2d6942f 100644 --- a/docs/reference/esql/functions/signature/tanh.svg +++ b/docs/reference/esql/functions/signature/tanh.svg @@ -1 +1 @@ -TANH(n) \ No newline at end of file +TANH(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_boolean.svg b/docs/reference/esql/functions/signature/to_boolean.svg index 43c2aac2bca53..97c86bf1543ee 100644 --- a/docs/reference/esql/functions/signature/to_boolean.svg +++ b/docs/reference/esql/functions/signature/to_boolean.svg @@ -1 +1 @@ -TO_BOOLEAN(v) \ No newline at end of file +TO_BOOLEAN(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_cartesianpoint.svg b/docs/reference/esql/functions/signature/to_cartesianpoint.svg index 44484e8321e2f..0de42490c2d57 100644 --- a/docs/reference/esql/functions/signature/to_cartesianpoint.svg +++ b/docs/reference/esql/functions/signature/to_cartesianpoint.svg @@ -1 +1 @@ -TO_CARTESIANPOINT(v) \ No newline at end of file +TO_CARTESIANPOINT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_cartesianshape.svg b/docs/reference/esql/functions/signature/to_cartesianshape.svg index c16ce9a6c15bc..fdc56831122ee 100644 --- a/docs/reference/esql/functions/signature/to_cartesianshape.svg +++ b/docs/reference/esql/functions/signature/to_cartesianshape.svg @@ -1 +1 @@ -TO_CARTESIANSHAPE(v) \ 
No newline at end of file +TO_CARTESIANSHAPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_datetime.svg b/docs/reference/esql/functions/signature/to_datetime.svg index eb9e74248471a..cfd2fcd94aaa0 100644 --- a/docs/reference/esql/functions/signature/to_datetime.svg +++ b/docs/reference/esql/functions/signature/to_datetime.svg @@ -1 +1 @@ -TO_DATETIME(v) \ No newline at end of file +TO_DATETIME(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg index 01fe0a4770156..1f323d6f2be6a 100644 --- a/docs/reference/esql/functions/signature/to_degrees.svg +++ b/docs/reference/esql/functions/signature/to_degrees.svg @@ -1 +1 @@ -TO_DEGREES(v) \ No newline at end of file +TO_DEGREES(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_double.svg b/docs/reference/esql/functions/signature/to_double.svg index e785e30ce5f81..1e87a7ea6a4a5 100644 --- a/docs/reference/esql/functions/signature/to_double.svg +++ b/docs/reference/esql/functions/signature/to_double.svg @@ -1 +1 @@ -TO_DOUBLE(v) \ No newline at end of file +TO_DOUBLE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_geopoint.svg b/docs/reference/esql/functions/signature/to_geopoint.svg index 444817aa388b9..e74ea27d1a9f6 100644 --- a/docs/reference/esql/functions/signature/to_geopoint.svg +++ b/docs/reference/esql/functions/signature/to_geopoint.svg @@ -1 +1 @@ -TO_GEOPOINT(v) \ No newline at end of file +TO_GEOPOINT(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_geoshape.svg b/docs/reference/esql/functions/signature/to_geoshape.svg index 91b02332ad806..8991592cea2fa 100644 --- a/docs/reference/esql/functions/signature/to_geoshape.svg +++ b/docs/reference/esql/functions/signature/to_geoshape.svg @@ -1 +1 @@ -TO_GEOSHAPE(v) \ No newline at end of file 
+TO_GEOSHAPE(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_integer.svg b/docs/reference/esql/functions/signature/to_integer.svg index beb2e94039e53..413acdb2ce6d9 100644 --- a/docs/reference/esql/functions/signature/to_integer.svg +++ b/docs/reference/esql/functions/signature/to_integer.svg @@ -1 +1 @@ -TO_INTEGER(v) \ No newline at end of file +TO_INTEGER(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_ip.svg b/docs/reference/esql/functions/signature/to_ip.svg index c1669c9376c8b..54856be5439b5 100644 --- a/docs/reference/esql/functions/signature/to_ip.svg +++ b/docs/reference/esql/functions/signature/to_ip.svg @@ -1 +1 @@ -TO_IP(v) \ No newline at end of file +TO_IP(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_long.svg b/docs/reference/esql/functions/signature/to_long.svg index 464d4a001cb35..ffe94435ebc04 100644 --- a/docs/reference/esql/functions/signature/to_long.svg +++ b/docs/reference/esql/functions/signature/to_long.svg @@ -1 +1 @@ -TO_LONG(v) \ No newline at end of file +TO_LONG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_radians.svg b/docs/reference/esql/functions/signature/to_radians.svg index 712431fb32497..8388ad682df3c 100644 --- a/docs/reference/esql/functions/signature/to_radians.svg +++ b/docs/reference/esql/functions/signature/to_radians.svg @@ -1 +1 @@ -TO_RADIANS(v) \ No newline at end of file +TO_RADIANS(number) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_string.svg b/docs/reference/esql/functions/signature/to_string.svg index 72fc676289d64..e7a167348c5a0 100644 --- a/docs/reference/esql/functions/signature/to_string.svg +++ b/docs/reference/esql/functions/signature/to_string.svg @@ -1 +1 @@ -TO_STRING(v) \ No newline at end of file +TO_STRING(field) \ No newline at end of file diff --git 
a/docs/reference/esql/functions/signature/to_unsigned_long.svg b/docs/reference/esql/functions/signature/to_unsigned_long.svg index da07b3a4c7349..c19873a0c4015 100644 --- a/docs/reference/esql/functions/signature/to_unsigned_long.svg +++ b/docs/reference/esql/functions/signature/to_unsigned_long.svg @@ -1 +1 @@ -TO_UNSIGNED_LONG(v) \ No newline at end of file +TO_UNSIGNED_LONG(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_version.svg b/docs/reference/esql/functions/signature/to_version.svg index e6337280c2e8d..3266c0b476ae6 100644 --- a/docs/reference/esql/functions/signature/to_version.svg +++ b/docs/reference/esql/functions/signature/to_version.svg @@ -1 +1 @@ -TO_VERSION(v) \ No newline at end of file +TO_VERSION(field) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/trim.svg b/docs/reference/esql/functions/signature/trim.svg index 5fc865d306f11..9d16d660b57ed 100644 --- a/docs/reference/esql/functions/signature/trim.svg +++ b/docs/reference/esql/functions/signature/trim.svg @@ -1 +1 @@ -TRIM(str) \ No newline at end of file +TRIM(string) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/abs.asciidoc b/docs/reference/esql/functions/types/abs.asciidoc index ff3073a11986b..8e9bd02d381e9 100644 --- a/docs/reference/esql/functions/types/abs.asciidoc +++ b/docs/reference/esql/functions/types/abs.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/acos.asciidoc b/docs/reference/esql/functions/types/acos.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/acos.asciidoc +++ b/docs/reference/esql/functions/types/acos.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff 
--git a/docs/reference/esql/functions/types/asin.asciidoc b/docs/reference/esql/functions/types/asin.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/asin.asciidoc +++ b/docs/reference/esql/functions/types/asin.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan.asciidoc b/docs/reference/esql/functions/types/atan.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/atan.asciidoc +++ b/docs/reference/esql/functions/types/atan.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/atan2.asciidoc b/docs/reference/esql/functions/types/atan2.asciidoc index 9684923c65edc..de4fcf9ef3258 100644 --- a/docs/reference/esql/functions/types/atan2.asciidoc +++ b/docs/reference/esql/functions/types/atan2.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -y | x | result +y_coordinate | x_coordinate | result double | double | double double | integer | double double | long | double diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 0edfc4f5de29b..74afa40892d39 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -4,6 +4,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -condition | value | result +condition | trueValue | result |=== diff --git a/docs/reference/esql/functions/types/ceil.asciidoc b/docs/reference/esql/functions/types/ceil.asciidoc index ff3073a11986b..8e9bd02d381e9 100644 --- a/docs/reference/esql/functions/types/ceil.asciidoc +++ b/docs/reference/esql/functions/types/ceil.asciidoc @@ -4,7 +4,7 @@ 
[%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 7d538636d6aec..a19b3aa073d32 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -expression | expressionX | result +first | rest | result boolean | boolean | boolean integer | integer | integer keyword | keyword | keyword diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 28bb85bca8312..401889f38b9e9 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -4,6 +4,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -date_part | field | result +datePart | date | result keyword | datetime | long |=== diff --git a/docs/reference/esql/functions/types/floor.asciidoc 
b/docs/reference/esql/functions/types/floor.asciidoc index ff3073a11986b..8e9bd02d381e9 100644 --- a/docs/reference/esql/functions/types/floor.asciidoc +++ b/docs/reference/esql/functions/types/floor.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index 78a95bb801378..728b5d188b32c 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | length | result +string | length | result keyword | integer | keyword text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index fac7f22999714..4c24499f7ab52 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | integer text | integer |=== diff --git a/docs/reference/esql/functions/types/log.asciidoc b/docs/reference/esql/functions/types/log.asciidoc index a04ceebada910..d5ec4e9e0e2c8 100644 --- a/docs/reference/esql/functions/types/log.asciidoc +++ b/docs/reference/esql/functions/types/log.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -base | value | result +base | number | result double | double | double double | integer | double double | long | double diff --git a/docs/reference/esql/functions/types/log10.asciidoc b/docs/reference/esql/functions/types/log10.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/log10.asciidoc +++ b/docs/reference/esql/functions/types/log10.asciidoc @@ -4,7 +4,7 @@ 
[%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/ltrim.asciidoc b/docs/reference/esql/functions/types/ltrim.asciidoc index 8174a8b93bcd5..d87cc86423798 100644 --- a/docs/reference/esql/functions/types/ltrim.asciidoc +++ b/docs/reference/esql/functions/types/ltrim.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index f23c3b15363b5..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -field | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index fa4e7af29059e..7ea15633bc180 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | delim | result +string | delim | result keyword | keyword | keyword keyword | text | keyword text | keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index f917938233cf4..b4ec6706b5008 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | integer cartesian_point | integer cartesian_shape | integer diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc 
b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index a66c7e21aaab3..db2476b22d74b 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 82450a489a895..0346d46de073c 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 82450a489a895..0346d46de073c 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index 555230a1c7252..e3301cb2c9abe 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index eb0c9996f0bd4..8e9bd02d381e9 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ 
b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 555230a1c7252..e3301cb2c9abe 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean datetime | datetime double | double diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index a832424f25560..49b5761f64fc8 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | start | end | result +field | start | end | result boolean | integer | integer | boolean cartesian_point | integer | integer | cartesian_point cartesian_shape | integer | integer | cartesian_shape diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index eb0c9996f0bd4..8e9bd02d381e9 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | integer long | long diff --git a/docs/reference/esql/functions/types/mv_zip.asciidoc b/docs/reference/esql/functions/types/mv_zip.asciidoc index 04495b9c6ee17..b7522019e454f 100644 --- a/docs/reference/esql/functions/types/mv_zip.asciidoc +++ b/docs/reference/esql/functions/types/mv_zip.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -mvLeft | mvRight | delim | 
result +string1 | string2 | delim | result keyword | keyword | keyword | keyword text | text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 3a9d25d69fa11..57ffce246f395 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | regex | newStr | result +string | regex | newString | result keyword | keyword | keyword | keyword keyword | keyword | text | keyword keyword | text | keyword | keyword diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index 78a95bb801378..728b5d188b32c 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | length | result +string | length | result keyword | integer | keyword text | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 96cfcad5adb1d..3ff795ff1f5a2 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -4,6 +4,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -value | decimals | result +number | decimals | result double | integer | double |=== diff --git a/docs/reference/esql/functions/types/rtrim.asciidoc b/docs/reference/esql/functions/types/rtrim.asciidoc index 8174a8b93bcd5..d87cc86423798 100644 --- a/docs/reference/esql/functions/types/rtrim.asciidoc +++ b/docs/reference/esql/functions/types/rtrim.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/docs/reference/esql/functions/types/sin.asciidoc 
b/docs/reference/esql/functions/types/sin.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index affb344eecbcb..61c4546a54709 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | delim | result +string | delim | result keyword | keyword | keyword text | text | keyword |=== diff --git a/docs/reference/esql/functions/types/sqrt.asciidoc b/docs/reference/esql/functions/types/sqrt.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/sqrt.asciidoc +++ b/docs/reference/esql/functions/types/sqrt.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/st_intersects.asciidoc b/docs/reference/esql/functions/types/st_intersects.asciidoc index b061ebd41359c..15e3de732f146 100644 --- a/docs/reference/esql/functions/types/st_intersects.asciidoc +++ b/docs/reference/esql/functions/types/st_intersects.asciidoc @@ -1,3 +1,7 @@ +// This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. + +*Supported types* + [%header.monospaced.styled,format=dsv,separator=|] |=== geomA | geomB | result diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index 0729e28f98ecc..784b8a7d48da8 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | start | length | result +string | start | length | result keyword | integer | integer | keyword text | integer | integer | keyword |=== diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index f34dea349ad12..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -n | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc index 6138578b26d9e..389c96bbab0f8 100644 --- a/docs/reference/esql/functions/types/to_boolean.asciidoc +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | boolean double | boolean integer | boolean diff --git a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc 
b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc index 3ae44dd04a67f..04d994833e8b3 100644 --- a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result cartesian_point | cartesian_point keyword | cartesian_point text | cartesian_point diff --git a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc index a0fe12cf20875..7b74e9c797575 100644 --- a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result cartesian_point | cartesian_shape cartesian_shape | cartesian_shape keyword | cartesian_shape diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index ca89c8dd47d62..a3fff3beeee20 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result datetime | datetime double | datetime integer | datetime diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc index 210a63718baff..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/to_degrees.asciidoc +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc index 5702d5a8c88e3..ea3c213f1e307 
100644 --- a/docs/reference/esql/functions/types/to_double.asciidoc +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | double datetime | double double | double diff --git a/docs/reference/esql/functions/types/to_geopoint.asciidoc b/docs/reference/esql/functions/types/to_geopoint.asciidoc index 579a93affebab..9d79461e3c2c9 100644 --- a/docs/reference/esql/functions/types/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/types/to_geopoint.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result geo_point | geo_point keyword | geo_point text | geo_point diff --git a/docs/reference/esql/functions/types/to_geoshape.asciidoc b/docs/reference/esql/functions/types/to_geoshape.asciidoc index faf922c8723e0..0ba662944df80 100644 --- a/docs/reference/esql/functions/types/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/types/to_geoshape.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result geo_point | geo_shape geo_shape | geo_shape keyword | geo_shape diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc index 04cf58a2df364..6e1ce99b109e3 100644 --- a/docs/reference/esql/functions/types/to_integer.asciidoc +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | integer datetime | integer double | integer diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index 3e7412ce9b64a..3c038215f824c 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | 
result +field | result ip | ip keyword | ip text | ip diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index e32f5d80ae92f..9b4ba9f91b835 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | long datetime | long double | long diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc index 210a63718baff..5172c78a3b96b 100644 --- a/docs/reference/esql/functions/types/to_radians.asciidoc +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +number | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 3e4253694e91c..c8b2cff97b4e8 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | keyword cartesian_point | keyword cartesian_shape | keyword diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc index b35ad29c0f193..51fa9e2022603 100644 --- a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result boolean | unsigned_long datetime | unsigned_long double | unsigned_long diff --git a/docs/reference/esql/functions/types/to_version.asciidoc b/docs/reference/esql/functions/types/to_version.asciidoc 
index 66bbb421e8636..fc6daace75c73 100644 --- a/docs/reference/esql/functions/types/to_version.asciidoc +++ b/docs/reference/esql/functions/types/to_version.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -v | result +field | result keyword | version text | version version | version diff --git a/docs/reference/esql/functions/types/trim.asciidoc b/docs/reference/esql/functions/types/trim.asciidoc index 8174a8b93bcd5..d87cc86423798 100644 --- a/docs/reference/esql/functions/types/trim.asciidoc +++ b/docs/reference/esql/functions/types/trim.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -str | result +string | result keyword | keyword text | text |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index d94d39f0a0c81..8491919b3ee93 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1385,32 +1385,32 @@ emp_no:integer | min_plus_max:integer | are_equal:boolean ; docsAbs -//tag::docsAbs[] +//tag::abs[] ROW number = -1.0 | EVAL abs_number = ABS(number) -//end::docsAbs[] +//end::abs[] ; -//tag::docsAbs-result[] -number:double | abs_number:double --1.0 |1.0 -//end::docsAbs-result[] +//tag::abs-result[] +number:double | abs_number:double +-1.0 | 1.0 +//end::abs-result[] ; docsAbsEmployees -//tag::docsAbsEmployees[] +//tag::abs-employees[] FROM employees | KEEP first_name, last_name, height | EVAL abs_height = ABS(0.0 - height) -//end::docsAbsEmployees[] +//end::abs-employees[] | SORT first_name | LIMIT 3 ; -//tag::docsAbsEmployees-result[] +//tag::abs-employees-result[] first_name:keyword | last_name:keyword | height:double | abs_height:double Alejandro |McAlpine |1.48 |1.48 Amabile |Gomatam |2.09 |2.09 Anneke |Preusig |1.56 |1.56 -//end::docsAbsEmployees-result[] +//end::abs-employees-result[] ; diff 
--git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 850e573e84fc2..1ca1a0e4a7b8a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -3,8 +3,8 @@ metaFunctions#[skip:-8.13.99] meta functions; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "The arccosine of an angle, expressed in radians." | false | false | false +abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Number between -1 and 1. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians." | false | false | false asin |"double asin(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." 
| false | false | false atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." | false | false | false atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 1df9406d5a1e9..3b66543f4bfd0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -22,8 +23,19 @@ import java.util.function.Function; public class Abs extends UnaryScalarFunction { - @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Returns the absolute value.") - public Abs(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { 
"double", "integer", "long", "unsigned_long" }, + description = "Returns the absolute value.", + examples = { @Example(file = "math", tag = "abs"), @Example(file = "math", tag = "abs-employees") } + ) + public Abs( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n + ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 68ac9c556c2e2..e4982fa69826f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,13 +22,17 @@ * Inverse cosine trigonometric function. */ public class Acos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "The arccosine of an angle, expressed in radians.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians.", + examples = { @Example(file = "floats", tag = "acos") } + ) public Acos( Source source, @Param( name = "number", type = { "double", "integer", "long", "unsigned_long" }, - description = "Number between -1 and 1" + description = "Number between -1 and 1. If `null`, the function returns `null`." 
) Expression n ) { super(source, n); From c4e9646c9749ff74ee1125666f0e2c487e73501a Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 21 Mar 2024 06:17:24 +0100 Subject: [PATCH 076/214] Re-enable the RemoteClusterSecurityBwcRestIT (#106570) This reverts commit 4ef0a8f1068f9701c087c54cdbd23f49742b7e7f / #106534 --- .../xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index 2db510ffc7f41..ccf9d66a5bc21 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.remotecluster; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -41,7 +40,6 @@ /** * BWC test which ensures that users and API keys with defined {@code remote_indices} privileges can be used to query legacy remote clusters */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104858") public class RemoteClusterSecurityBwcRestIT extends AbstractRemoteClusterSecurityTestCase { private static final Version OLD_CLUSTER_VERSION = Version.fromString(System.getProperty("tests.old_cluster_version")); From 19cffdef0c6d56d1ffe5384b252dedd432634d74 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 21 Mar 2024 08:00:37 +0200 Subject: [PATCH 077/214] Support non-keyword dimensions as 
routing fields in TSDB (#105501) * Fix test failure https://gradle-enterprise.elastic.co/s/icg66i6mwnjoi * Fix test failure https://gradle-enterprise.elastic.co/s/icg66i6mwnjoi * Nest pass-through objects within objects * Support numeric fields as pass-through dimensions * Update docs/changelog/105073.yaml * fix tests * fix tests * refactor dimension property * fix yaml * add numeric to routing builder * fix violation * fix yaml versions * handle all numeric values in dimension fields * use shardId in TSDB doc id * spotless fix * test fixes * index version guard * test fix * revert shardId changes * add routing id field mapper * Update docs/changelog/105501.yaml * fix shrunk test * use SortedDocValuesField * fix tests * delete unused yaml * submit test fixes * sync * restrict id override to latest version * update painless path * test fix * check for invalid id * fix painless tests * restore bwc routing changes * add unittest * refactor passthrough test * address comments * remove redundant variable * remove assert * fix id test * fix painless * skip storing routing field * skip generating the id when available * skip assert on routing in TranslogWriter * skip id mismatch assert * changelog update * revert redundant changes * remove supportsDimension * spotless * rename var for forcing dimension * small refactor * small refactor --- docs/changelog/105501.yaml | 5 ++ .../datastreams/TSDBIndexingIT.java | 17 ++----- .../TSDBPassthroughIndexingIT.java | 50 +++++++++++++------ .../test/data_stream/150_tsdb.yml | 36 ++++++------- .../test/tsdb/140_routing_path.yml | 6 +-- .../rest-api-spec/test/tsdb/20_mapping.yml | 14 +++--- .../index/mapper/AbstractScriptFieldType.java | 2 +- .../index/mapper/DocumentMapper.java | 2 +- .../index/mapper/FieldMapper.java | 20 ++++++++ .../index/mapper/IpFieldMapper.java | 21 ++++---- .../index/mapper/KeywordFieldMapper.java | 29 ++--------- .../index/mapper/MappedFieldType.java | 37 ++++++++++---- .../index/mapper/NumberFieldMapper.java 
| 19 +++---- .../index/mapper/PassThroughObjectMapper.java | 4 +- .../index/mapper/TimeSeriesIdFieldMapper.java | 2 + .../flattened/FlattenedFieldMapper.java | 36 +++---------- .../index/TimeSeriesModeTests.java | 22 +++----- .../index/mapper/KeywordFieldMapperTests.java | 9 ---- .../flattened/FlattenedFieldMapperTests.java | 4 +- .../index/mapper/MapperTestCase.java | 11 +++- .../index/mapper/NumberFieldMapperTests.java | 8 +-- .../unsignedlong/UnsignedLongFieldMapper.java | 8 ++- 22 files changed, 178 insertions(+), 184 deletions(-) create mode 100644 docs/changelog/105501.yaml diff --git a/docs/changelog/105501.yaml b/docs/changelog/105501.yaml new file mode 100644 index 0000000000000..2e5e375764640 --- /dev/null +++ b/docs/changelog/105501.yaml @@ -0,0 +1,5 @@ +pr: 105501 +summary: Support non-keyword dimensions as routing fields in TSDB +area: TSDB +type: enhancement +issues: [] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 08b09bbc78348..772cc0f98d757 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -260,7 +260,7 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce assertThat( e.getCause().getCause().getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [metricset] was not a dimension." 
) @@ -289,7 +289,7 @@ public void testInvalidTsdbTemplatesNoTimeSeriesDimensionAttribute() throws Exce } } - public void testInvalidTsdbTemplatesNoKeywordFieldType() throws Exception { + public void testTsdbTemplatesNoKeywordFieldType() throws Exception { var mappingTemplate = """ { "_doc":{ @@ -315,18 +315,7 @@ public void testInvalidTsdbTemplatesNoKeywordFieldType() throws Exception { .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) .build() ); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet() - ); - assertThat( - e.getCause().getCause().getMessage(), - equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [metricset] was [long]." - ) - ); + client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet(); } public void testInvalidTsdbTemplatesMissingSettings() throws Exception { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java index 5d84baa5f6ea4..aa3fa2a730be3 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBPassthroughIndexingIT.java @@ -53,6 +53,17 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { public static final String MAPPING_TEMPLATE = """ { "_doc":{ + "dynamic_templates": [ + { + "strings_as_ip": { + "match_mapping_type": "string", + "match": "*ip", + "mapping": { + "type": "ip" + } + } + } + ], "properties": { "@timestamp" : { "type": "date" @@ -87,6 +98,8 
@@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { "@timestamp": "$time", "attributes": { "metricset": "pod", + "number.long": $number1, + "number.double": $number2, "pod": { "name": "$name", "uid": "$uid", @@ -102,6 +115,15 @@ public class TSDBPassthroughIndexingIT extends ESSingleNodeTestCase { } """; + private static String getRandomDoc(Instant time) { + return DOC.replace("$time", formatInstant(time)) + .replace("$uid", randomUUID()) + .replace("$name", randomAlphaOfLength(4)) + .replace("$number1", Long.toString(randomLong())) + .replace("$number2", Double.toString(randomDouble())) + .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))); + } + @Override protected Collection> getPlugins() { return List.of(DataStreamsPlugin.class, InternalSettingsPlugin.class); @@ -137,13 +159,7 @@ public void testIndexingGettingAndSearching() throws Exception { Instant time = Instant.now(); for (int i = 0; i < indexingIters; i++) { var indexRequest = new IndexRequest("k8s").opType(DocWriteRequest.OpType.CREATE); - indexRequest.source( - DOC.replace("$time", formatInstant(time)) - .replace("$uid", randomUUID()) - .replace("$name", randomAlphaOfLength(4)) - .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))), - XContentType.JSON - ); + indexRequest.source(getRandomDoc(time), XContentType.JSON); var indexResponse = client().index(indexRequest).actionGet(); index = indexResponse.getIndex(); String id = indexResponse.getId(); @@ -176,7 +192,9 @@ public void testIndexingGettingAndSearching() throws Exception { ); @SuppressWarnings("unchecked") var attributes = (Map>) ObjectPath.eval("properties.attributes.properties", mapping); - assertMap(attributes.get("pod.ip"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); + assertMap(attributes.get("number.long"), matchesMap().entry("type", "long").entry("time_series_dimension", true)); + assertMap(attributes.get("number.double"), matchesMap().entry("type", 
"float").entry("time_series_dimension", true)); + assertMap(attributes.get("pod.ip"), matchesMap().entry("type", "ip").entry("time_series_dimension", true)); assertMap(attributes.get("pod.uid"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); assertMap(attributes.get("pod.name"), matchesMap().entry("type", "keyword").entry("time_series_dimension", true)); // alias field mappers: @@ -184,6 +202,14 @@ public void testIndexingGettingAndSearching() throws Exception { ObjectPath.eval("properties.metricset", mapping), matchesMap().entry("type", "alias").entry("path", "attributes.metricset") ); + assertMap( + ObjectPath.eval("properties.number.properties.long", mapping), + matchesMap().entry("type", "alias").entry("path", "attributes.number.long") + ); + assertMap( + ObjectPath.eval("properties.number.properties.double", mapping), + matchesMap().entry("type", "alias").entry("path", "attributes.number.double") + ); assertMap( ObjectPath.eval("properties.pod.properties", mapping), matchesMap().extraOk().entry("name", matchesMap().entry("type", "alias").entry("path", "attributes.pod.name")) @@ -220,13 +246,7 @@ public void testIndexingGettingAndSearchingShrunkIndex() throws Exception { var bulkRequest = new BulkRequest(dataStreamName); for (int i = 0; i < numBulkItems; i++) { var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); - indexRequest.source( - DOC.replace("$time", formatInstant(time)) - .replace("$uid", randomUUID()) - .replace("$name", randomAlphaOfLength(4)) - .replace("$ip", InetAddresses.toAddrString(randomIp(randomBoolean()))), - XContentType.JSON - ); + indexRequest.source(getRandomDoc(time), XContentType.JSON); bulkRequest.add(indexRequest); time = time.plusMillis(1); } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 
20eb33ecefdee..b37317208d0dc 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -231,13 +231,13 @@ dynamic templates: refresh: true body: - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": "10", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z", "data": "10", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:09.138Z", "data": "20", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z", "data": "20", "attributes.dim1": "A", "attributes.dim2": "1", "attributes.another.dim1": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.138Z", "data": "30", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z", "data": "30", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.238Z", "data": "40", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z", "data": "40", "attributes.dim1": "B", "attributes.dim2": "2", "attributes.another.dim1": "D", 
"attributes.another.dim2": "20.5" }' - do: search: @@ -263,7 +263,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -282,7 +282,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -301,7 +301,7 @@ dynamic templates: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -313,14 +313,14 @@ dynamic templates: filterA: filter: term: - another.dim2: 10 + another.dim2: 10.5 aggs: tsids: terms: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiK8yYWLhfZ18WLDvTuBX1YJX1Ll7UMNJqYNES5Eg" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MD2HE8yse1ZklY-p0-bRcC8gYpiKqVppKhfZ18WLDvTuNPo7EnyZdkhvafL006Xf2Q" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- @@ -466,13 +466,13 @@ dynamic templates with nesting: refresh: true body: - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ 
"@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10" }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20" }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5" }' - do: search: @@ -498,7 +498,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: 
"MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -517,7 +517,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -536,7 +536,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -548,14 +548,14 @@ dynamic templates with nesting: filterA: filter: term: - another.dim2: 10 + another.dim2: 10.5 aggs: tsids: terms: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK7zJhYuIGKYiosO9O4X2dfFtp-JEbk39FSSMEq_vwX7uw" } + - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml index 5813445326ef6..2ee2391458b03 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml @@ -90,11 +90,11 @@ missing routing path field: --- missing dimension on routing path field: - skip: - version: " - 8.7.99" - reason: error message changed in 8.8.0 + version: " - 8.13.99" + reason: error message changed in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[tag\] was not a dimension./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[tag\] was not a dimension./' indices.create: index: test body: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 1ff32192b9e08..807f60a0faf35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -121,11 +121,11 @@ top level wildcard dim object: --- exact match object type: - skip: - version: " - 8.7.99" - reason: routing_path error message updated in 8.8.0 + version: " - 8.13.99" + reason: routing_path error message updated in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[dim\] was \[object\]./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. 
\[dim\] was \[object\]./' indices.create: index: tsdb_index body: @@ -154,11 +154,11 @@ exact match object type: --- non keyword matches routing_path: - skip: - version: " - 8.7.99" - reason: routing_path error message updated in 8.8.0 + version: " - 8.13.99" + reason: routing_path error message updated in 8.14.0 - do: - catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[@timestamp\] was \[date\]./' + catch: '/All fields that match routing_path must be configured with \[time_series_dimension: true\] or flattened fields with a list of dimensions in \[time_series_dimensions\] and without the \[script\] parameter. \[@timestamp\] was not a dimension./' indices.create: index: test_index body: @@ -273,7 +273,7 @@ runtime field matching routing path: body: - '{"index": {}}' - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim_kw": "dim", "dim": {"foo": "a"}, "extra_field": 100}' - - match: {items.0.index.error.reason: "All fields that match routing_path must be keywords with [time_series_dimension: true] or flattened fields with a list of dimensions in [time_series_dimensions] and without the [script] parameter. [dim.foo] was a runtime [keyword]."} + - match: {items.0.index.error.reason: "All fields that match routing_path must be configured with [time_series_dimension: true] or flattened fields with a list of dimensions in [time_series_dimensions] and without the [script] parameter. 
[dim.foo] was a runtime [keyword]."} --- "dynamic: false matches routing_path": diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java index e9bf5838be8b3..cf453bd1571be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldType.java @@ -208,7 +208,7 @@ protected final LeafFactory leafFactory(SearchExecutionContext context) { public void validateMatchedRoutingPath(final String routingPath) { throw new IllegalArgumentException( "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " + + "must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] " + "and without the [script] parameter. [" + name() diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index f7dc09cdbb370..9b3496acfd9f3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -125,7 +125,7 @@ public void validate(IndexSettings settings, boolean checkLimits) { // object type is not allowed in the routing paths if (path.equals(objectName)) { throw new IllegalArgumentException( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] " + "and without the [script] parameter. 
[" + objectName diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 71fd9edd49903..e029aaa657d23 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -1415,6 +1415,26 @@ private static boolean isDeprecatedParameter(String propName, IndexVersion index } } + /** + * Creates mappers for fields that can act as time-series dimensions. + */ + public abstract static class DimensionBuilder extends Builder { + + private boolean inheritDimensionParameterFromParentObject = false; + + public DimensionBuilder(String name) { + super(name); + } + + void setInheritDimensionParameterFromParentObject() { + this.inheritDimensionParameterFromParentObject = true; + } + + protected boolean inheritDimensionParameterFromParentObject(MapperBuilderContext context) { + return inheritDimensionParameterFromParentObject || context.parentObjectContainsDimensions(); + } + } + public static BiConsumer notInMultiFields(String type) { return (n, c) -> { if (c.isWithinMultiField()) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 355b38d4dcb96..2e0fc68770045 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -69,7 +69,7 @@ private static IpFieldMapper toType(FieldMapper in) { return (IpFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -166,7 +166,7 @@ protected Parameter[] 
getParameters() { @Override public IpFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } return new IpFieldMapper( @@ -246,6 +246,16 @@ public boolean mayExistInIndex(SearchExecutionContext context) { return context.fieldExistsInIndex(name()); } + @Override + public boolean isDimension() { + return isDimension; + } + + @Override + public boolean hasScriptValues() { + return scriptValues != null; + } + private static InetAddress parse(Object value) { if (value instanceof InetAddress) { return (InetAddress) value; @@ -461,13 +471,6 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi } return terms.intersect(prefixAutomaton, searchBytes); } - - /** - * @return true if field has been marked as a dimension field - */ - public boolean isDimension() { - return isDimension; - } } private final boolean indexed; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 06e689784b087..4024798a85370 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -137,7 +137,7 @@ private static KeywordFieldMapper toType(FieldMapper in) { return (KeywordFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed = Parameter.indexParam(m -> toType(m).indexed, true); private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -304,7 +304,7 @@ private KeywordFieldType buildFieldType(MapperBuilderContext context, FieldType } else if (splitQueriesOnWhitespace.getValue()) { searchAnalyzer = Lucene.WHITESPACE_ANALYZER; } - if 
(context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension(true); } return new KeywordFieldType( @@ -811,35 +811,14 @@ public int ignoreAbove() { return ignoreAbove; } - /** - * @return true if field has been marked as a dimension field - */ @Override public boolean isDimension() { return isDimension; } @Override - public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was not a dimension." - ); - } - if (scriptValues != null) { - throw new IllegalArgumentException( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] has a [script] parameter." - ); - } + public boolean hasScriptValues() { + return scriptValues != null; } public boolean hasNormalizer() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 1707871066645..a554e6e44a8e8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -196,6 +196,13 @@ public boolean isDimension() { return false; } + /** + * @return true if field has script values. + */ + public boolean hasScriptValues() { + return false; + } + /** * @return a list of dimension fields. 
Expected to be used by fields that have * nested fields or that, in some way, identify a collection of fields by means @@ -623,16 +630,26 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi * Validate that this field can be the target of {@link IndexMetadata#INDEX_ROUTING_PATH}. */ public void validateMatchedRoutingPath(String routingPath) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was [" - + typeName() - + "]." - ); + if (hasScriptValues()) { + throw new IllegalArgumentException( + "All fields that match routing_path must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + name() + + "] has a [script] parameter." + ); + } + + if (isDimension() == false) { + throw new IllegalArgumentException( + "All fields that match routing_path " + + "must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + name() + + "] was not a dimension." 
+ ); + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 2245e527c2aa2..c04c3e5afdc70 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -70,7 +70,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -89,7 +88,7 @@ private static NumberFieldMapper toType(FieldMapper in) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> toType(m).hasDocValues, true); @@ -183,11 +182,6 @@ public Builder( } }); this.dimension = TimeSeriesParams.dimensionParam(m -> toType(m).dimension).addValidator(v -> { - if (v && EnumSet.of(NumberType.INTEGER, NumberType.LONG, NumberType.BYTE, NumberType.SHORT).contains(type) == false) { - throw new IllegalArgumentException( - "Parameter [" + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + "] cannot be set to numeric type [" + type.name + "]" - ); - } if (v && (indexed.getValue() == false || hasDocValues.getValue() == false)) { throw new IllegalArgumentException( "Field [" @@ -267,7 +261,7 @@ protected Parameter[] getParameters() { @Override public NumberFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } @@ -1745,13 +1739,16 @@ public CollapseType collapseType() { return CollapseType.NUMERIC; } - /** - * @return true if field has been marked as a 
dimension field - */ + @Override public boolean isDimension() { return isDimension; } + @Override + public boolean hasScriptValues() { + return scriptValues != null; + } + /** * If field is a time series metric field, returns its metric type * @return the metric type or null diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 05ae7e59f69c3..16b4d0b49917f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -41,8 +41,8 @@ public Builder(String name) { @Override public PassThroughObjectMapper.Builder add(Mapper.Builder builder) { - if (timeSeriesDimensionSubFields.value() && builder instanceof KeywordFieldMapper.Builder keywordBuilder) { - keywordBuilder.dimension(true); + if (timeSeriesDimensionSubFields.value() && builder instanceof FieldMapper.DimensionBuilder dimensionBuilder) { + dimensionBuilder.setInheritDimensionParameterFromParentObject(); } super.add(builder); return this; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 2d330e433d444..112b3ec96b39e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -413,6 +413,8 @@ public static Map decodeTsidAsMap(StreamInput in) { Object ul = DocValueFormat.UNSIGNED_LONG_SHIFTED.format(in.readLong()); result.put(name, ul); } + case (byte) 'd' -> // parse a double + result.put(name, in.readDouble()); default -> throw new IllegalArgumentException("Cannot parse [" + name + "]: Unknown type [" + type + "]"); } } diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index 5a8efb6c8ed59..8feaba73b1dd4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -231,6 +231,11 @@ public static final class KeyedFlattenedFieldType extends StringFieldType { private final String rootName; private final boolean isDimension; + @Override + public boolean isDimension() { + return isDimension; + } + KeyedFlattenedFieldType( String rootName, boolean indexed, @@ -280,24 +285,6 @@ public Query existsQuery(SearchExecutionContext context) { return new PrefixQuery(term); } - @Override - public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + this.rootName - + "." - + this.key - + "] was [" - + typeName() - + "]." - ); - } - } - @Override public Query rangeQuery( Object lowerTerm, @@ -737,17 +724,8 @@ public List dimensions() { @Override public void validateMatchedRoutingPath(final String routingPath) { - if (false == isDimension && this.dimensions.contains(routingPath) == false) { - throw new IllegalArgumentException( - "All fields that match routing_path " - + "must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + name() - + "] was [" - + typeName() - + "]." 
- ); + if (this.dimensions.contains(routingPath) == false) { + super.validateMatchedRoutingPath(routingPath); } } } diff --git a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java index fc29d13667d33..6118a84814462 100644 --- a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java @@ -174,7 +174,7 @@ public void testRoutingPathEqualsObjectNameError() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.o] was [object]." ) @@ -192,29 +192,21 @@ public void testRoutingPathMatchesNonDimensionKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.non_dim] was not a dimension." ) ); } - public void testRoutingPathMatchesNonKeyword() { + public void testRoutingPathMatchesNonKeyword() throws IOException { Settings s = getSettings(randomBoolean() ? 
"dim.non_kwd" : "dim.*"); - Exception e = expectThrows(IllegalArgumentException.class, () -> createMapperService(s, mapping(b -> { + createMapperService(s, mapping(b -> { b.startObject("dim").startObject("properties"); b.startObject("non_kwd").field("type", "integer").field("time_series_dimension", true).endObject(); b.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject(); b.endObject().endObject(); - }))); - assertThat( - e.getMessage(), - equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [dim.non_kwd] was [integer]." - ) - ); + })); } public void testRoutingPathMatchesScriptedKeyword() { @@ -229,7 +221,7 @@ public void testRoutingPathMatchesScriptedKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.kwd] has a [script] parameter." ) @@ -245,7 +237,7 @@ public void testRoutingPathMatchesRuntimeKeyword() { assertThat( e.getMessage(), equalTo( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [dim.kwd] was a runtime [keyword]." 
) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 542d0088f2ad0..70e375a89d5e7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -601,15 +601,6 @@ protected boolean dedupAfterFetch() { return true; } - @Override - protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { - return "All fields that match routing_path must be keywords with [time_series_dimension: true] " - + "or flattened fields with a list of dimensions in [time_series_dimensions] and " - + "without the [script] parameter. [" - + mapper.name() - + "] was not a dimension."; - } - public void testDimensionInRoutingPath() throws IOException { MapperService mapper = createMapperService(fieldMapping(b -> b.field("type", "keyword").field("time_series_dimension", true))); IndexSettings settings = createIndexSettings( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java index e4ea78f3b7a0e..4f23c86f53cca 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapperTests.java @@ -467,9 +467,9 @@ public void testMissingDimensionInRoutingPath() throws IOException { ); Exception ex = expectThrows(IllegalArgumentException.class, () -> mapper.documentMapper().validate(settings, false)); assertEquals( - "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] 
and " - + "without the [script] parameter. [field.key3] was [flattened].", + + "without the [script] parameter. [field._keyed] was not a dimension.", ex.getMessage() ); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 43ac8057a3fc0..fa0f0e1b95f54 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1030,8 +1030,15 @@ public final void testMinimalIsInvalidInRoutingPath() throws IOException { } } - protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { - return "All fields that match routing_path must be keywords with [time_series_dimension: true] " + private String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { + if (mapper instanceof FieldMapper fieldMapper && fieldMapper.fieldType().isDimension() == false) { + return "All fields that match routing_path must be configured with [time_series_dimension: true] " + + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + + "without the [script] parameter. [" + + mapper.name() + + "] was not a dimension."; + } + return "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. 
[" + mapper.name() diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 81848b5a50114..c60a913a63b33 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -253,12 +253,8 @@ public void testDimension() throws IOException { // dimension = false is allowed assertDimension(false, NumberFieldMapper.NumberFieldType::isDimension); - // dimension = true is not allowed - Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> { - minimalMapping(b); - b.field("time_series_dimension", true); - }))); - assertThat(e.getCause().getMessage(), containsString("Parameter [time_series_dimension] cannot be set")); + // dimension = true is allowed + assertDimension(true, NumberFieldMapper.NumberFieldType::isDimension); } public void testMetricType() throws IOException { diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 52424956ef53e..e0ce1f92b2a37 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -76,7 +76,7 @@ private static UnsignedLongFieldMapper toType(FieldMapper in) { return (UnsignedLongFieldMapper) in; } - public static final class Builder extends FieldMapper.Builder { + public static final class Builder extends FieldMapper.DimensionBuilder { private final Parameter indexed; private final Parameter hasDocValues = Parameter.docValuesParam(m -> 
toType(m).hasDocValues, true); private final Parameter stored = Parameter.storeParam(m -> toType(m).stored, false); @@ -195,7 +195,7 @@ Number parsedNullValue() { @Override public UnsignedLongFieldMapper build(MapperBuilderContext context) { - if (context.parentObjectContainsDimensions()) { + if (inheritDimensionParameterFromParentObject(context)) { dimension.setValue(true); } UnsignedLongFieldType fieldType = new UnsignedLongFieldType( @@ -539,9 +539,7 @@ static Long parseUpperRangeTerm(Object value, boolean include) { return longValue; } - /** - * @return true if field has been marked as a dimension field - */ + @Override public boolean isDimension() { return isDimension; } From 4374a385c5d4398be8146cb22a750fca06956122 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 21 Mar 2024 09:39:22 +0100 Subject: [PATCH 078/214] Use historical features for Yaml REST tests for bulk api (#106402) --- .../rest-api-spec/test/bulk/10_basic.yml | 12 ++++++------ .../test/bulk/11_dynamic_templates.yml | 10 +++++----- .../rest-api-spec/test/bulk/90_pipeline.yml | 4 ++-- .../test/rest/yaml/YamlTestLegacyFeatures.java | 15 +++++++++++++++ 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml index 0d5455ca22317..2fde1f48e93df 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/10_basic.yml @@ -59,8 +59,8 @@ --- "Empty _id with op_type create": - - skip: - version: " - 7.4.99" + - requires: + cluster_features: ["bulk_auto_id"] reason: "auto id + op type create only supported since 7.5" - do: @@ -119,8 +119,8 @@ --- "When setting require_alias flag per request": - - skip: - version: " - 7.9.99" + - requires: + cluster_features: ["bulk_require_alias"] reason: "require_alias flag was added in version 7.10" - 
do: @@ -162,8 +162,8 @@ index: new_index_not_created --- "When setting require_alias flag": - - skip: - version: " - 7.9.99" + - requires: + cluster_features: ["bulk_require_alias"] reason: "require_alias flag was added in version 7.10" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml index 348d7a6fd0ef1..6e9502fb32f95 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/11_dynamic_templates.yml @@ -1,8 +1,8 @@ --- "Dynamic templates": - - skip: - features: contains - version: " - 8.7.99" + - requires: + test_runner_features: ["contains"] + cluster_features: ["bulk_dynamic_template_document_parse_exception"] reason: "Exception type has changed in 8.8.0" - do: @@ -175,8 +175,8 @@ --- "Dynamic templates with op_type": - - skip: - version: " - 8.6.0" + - requires: + cluster_features: ["bulk_dynamic_template_op_type"] reason: "bug fixed in 8.6.1" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml index ba34604231268..3e919c5960278 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/90_pipeline.yml @@ -1,7 +1,7 @@ --- "One request has pipeline and another not": - - skip: - version: " - 7.9.0" + - requires: + cluster_features: ["bulk_pipeline_validate"] reason: "fixed in 7.9.1" - do: bulk: diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java index fb9918e1f85f1..326afdaa7ae1a 100644 --- 
a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java @@ -20,6 +20,15 @@ */ public class YamlTestLegacyFeatures implements FeatureSpecification { + private static final NodeFeature BULK_AUTO_ID = new NodeFeature("bulk_auto_id"); + private static final NodeFeature BULK_REQUIRE_ALIAS = new NodeFeature("bulk_require_alias"); + private static final NodeFeature BULK_DYNAMIC_TEMPLATE_OP_TYPE = new NodeFeature("bulk_dynamic_template_op_type"); + private static final NodeFeature BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION = new NodeFeature( + "bulk_dynamic_template_document_parse_exception" + ); + + private static final NodeFeature BULK_PIPELINE_VALIDATE = new NodeFeature("bulk_pipeline_validate"); + private static final NodeFeature CAT_ALIASES_SHOW_WRITE_INDEX = new NodeFeature("cat_aliases_show_write_index"); private static final NodeFeature CAT_ALIASES_HIDDEN = new NodeFeature("cat_aliases_hidden"); private static final NodeFeature CAT_ALIASES_LOCAL_DEPRECATED = new NodeFeature("cat_aliases_local_deprecated"); @@ -45,6 +54,12 @@ public class YamlTestLegacyFeatures implements FeatureSpecification { @Override public Map getHistoricalFeatures() { return Map.ofEntries( + Map.entry(BULK_AUTO_ID, Version.V_7_5_0), + Map.entry(BULK_REQUIRE_ALIAS, Version.V_7_10_0), + Map.entry(BULK_PIPELINE_VALIDATE, Version.V_7_9_1), + Map.entry(BULK_DYNAMIC_TEMPLATE_OP_TYPE, Version.V_8_6_1), + Map.entry(BULK_DYNAMIC_TEMPLATE_DOCUMENT_PARSE_EXCEPTION, Version.V_8_8_0), + Map.entry(CAT_ALIASES_SHOW_WRITE_INDEX, Version.V_7_4_0), Map.entry(CAT_ALIASES_HIDDEN, Version.V_7_7_0), Map.entry(CAT_ALIASES_LOCAL_DEPRECATED, Version.V_8_12_0), From c91409c2a02fa0fd6ced8e4297b1e53333f88a09 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 21 Mar 2024 09:14:13 +0000 Subject: [PATCH 079/214] Improve threading in `TransportGetSnapshotsAction` (#106527) 
There's a comment about intentionally avoiding using the `SNAPSHOT_META` pool in `TransportGetSnapshotsAction`, but in fact we implicitly fork onto the `SNAPSHOT_META` pool in a few spots and end up doing much of the work there anyway. This commit fixes things to use the `MANAGEMENT` pool for all this work as originally intended. --- .../get/TransportGetSnapshotsAction.java | 223 ++++++++++-------- 1 file changed, 124 insertions(+), 99 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 45389f4aba2fa..898adf721be33 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -10,8 +10,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.SnapshotsInProgress; @@ -27,7 +29,6 @@ import org.elasticsearch.common.util.concurrent.AbstractThrottledTaskRunner; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.common.util.concurrent.ThrottledIterator; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Predicates; @@ -230,46 +231,71 @@ private class GetSnapshotsOperation { } void 
getMultipleReposSnapshotInfo(ActionListener listener) { - try (var listeners = new RefCountingListener(listener.map(ignored -> { - cancellableTask.ensureNotCancelled(); - final var sortedSnapshotsInRepos = sortSnapshots( - allSnapshotInfos.stream().flatMap(Collection::stream), - totalCount.get(), - offset, - size - ); - final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); - assert indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); - final int finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); - return new GetSnapshotsResponse( - snapshotInfos, - failuresByRepository, - finalRemaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, - totalCount.get(), - finalRemaining - ); - }))) { - for (final RepositoryMetadata repository : repositories) { - final String repoName = repository.name(); - if (skipRepository(repoName)) { - // TODO we should still count the matching snapshots in totalCount - continue; - } + SubscribableListener + + .newForked(repositoriesDoneListener -> { + try (var listeners = new RefCountingListener(repositoriesDoneListener)) { + for (final RepositoryMetadata repository : repositories) { + final String repoName = repository.name(); + if (skipRepository(repoName)) { + continue; + } - getSingleRepoSnapshotInfo(repoName, listeners.acquire((SnapshotsInRepo snapshotsInRepo) -> { - allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); - remaining.addAndGet(snapshotsInRepo.remaining()); - totalCount.addAndGet(snapshotsInRepo.totalCount()); - }).delegateResponse((l, e) -> { - if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { - failuresByRepository.put(repoName, elasticsearchException); - l.onResponse(SnapshotsInRepo.EMPTY); - } else { - l.onFailure(e); + SubscribableListener + + .newForked(repositoryDataListener -> { + if (snapshotNamePredicate == SnapshotNamePredicate.MATCH_CURRENT_ONLY) { + 
repositoryDataListener.onResponse(null); + } else { + repositoriesService.repository(repoName).getRepositoryData(executor, repositoryDataListener); + } + }) + + .andThen((l, repositoryData) -> loadSnapshotInfos(repoName, repositoryData, l)) + + .addListener(new DelegatingActionListener<>(listeners.acquire()) { + @Override + public void onResponse(SnapshotsInRepo snapshotsInRepo) { + allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); + remaining.addAndGet(snapshotsInRepo.remaining()); + totalCount.addAndGet(snapshotsInRepo.totalCount()); + delegate.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + if (isMultiRepoRequest && e instanceof ElasticsearchException elasticsearchException) { + failuresByRepository.put(repoName, elasticsearchException); + delegate.onResponse(null); + } else { + delegate.onFailure(e); + } + } + }); } - })); - } - } + } + }) + + .addListener(listener.map(ignored -> { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); + cancellableTask.ensureNotCancelled(); + final var sortedSnapshotsInRepos = sortSnapshots( + allSnapshotInfos.stream().flatMap(Collection::stream), + totalCount.get(), + offset, + size + ); + final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); + assert indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); + final int finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); + return new GetSnapshotsResponse( + snapshotInfos, + failuresByRepository, + finalRemaining > 0 ? 
sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, + totalCount.get(), + finalRemaining + ); + })); } private boolean skipRepository(String repositoryName) { @@ -281,20 +307,9 @@ private boolean skipRepository(String repositoryName) { } } - private void getSingleRepoSnapshotInfo(String repo, ActionListener listener) { - final ListenableFuture repositoryDataListener = new ListenableFuture<>(); - if (snapshotNamePredicate == SnapshotNamePredicate.MATCH_CURRENT_ONLY) { - repositoryDataListener.onResponse(null); - } else { - repositoriesService.getRepositoryData(repo, repositoryDataListener); - } - - repositoryDataListener.addListener( - listener.delegateFailureAndWrap((l, repositoryData) -> loadSnapshotInfos(repo, repositoryData, l)) - ); - } - private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryData, ActionListener listener) { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); + if (cancellableTask.notifyIfCancelled(listener)) { return; } @@ -373,56 +388,65 @@ private void snapshots(String repositoryName, Collection snapshotIds } } // then, look in the repository if there's any matching snapshots left - try ( - var listeners = new RefCountingListener( - // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - listener.safeMap(v -> sortSnapshotsWithNoOffsetOrLimit(snapshots)) - ) - ) { - if (snapshotIdsToIterate.isEmpty()) { - return; - } + SubscribableListener - final Repository repository; - try { - repository = repositoriesService.repository(repositoryName); - } catch (RepositoryMissingException e) { - listeners.acquire().onFailure(e); - return; - } + .newForked(l -> { + try (var listeners = new RefCountingListener(l)) { + if (snapshotIdsToIterate.isEmpty()) { + return; + } - // only need to synchronize accesses related to reading SnapshotInfo from the repo - final List syncSnapshots = 
Collections.synchronizedList(snapshots); - - ThrottledIterator.run( - Iterators.failFast(snapshotIdsToIterate.iterator(), () -> cancellableTask.isCancelled() || listeners.isFailing()), - (ref, snapshotId) -> { - final var refListener = ActionListener.runBefore(listeners.acquire(), ref::close); - getSnapshotInfoExecutor.getSnapshotInfo(repository, snapshotId, new ActionListener<>() { - @Override - public void onResponse(SnapshotInfo snapshotInfo) { - if (matchesPredicates(snapshotInfo)) { - syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); - } - refListener.onResponse(null); - } + final Repository repository; + try { + repository = repositoriesService.repository(repositoryName); + } catch (RepositoryMissingException e) { + listeners.acquire().onFailure(e); + return; + } - @Override - public void onFailure(Exception e) { - if (ignoreUnavailable) { - logger.warn(Strings.format("failed to fetch snapshot info for [%s:%s]", repository, snapshotId), e); - refListener.onResponse(null); - } else { - refListener.onFailure(e); - } - } - }); - }, - getSnapshotInfoExecutor.getMaxRunningTasks(), - () -> {}, - () -> {} - ); - } + // only need to synchronize accesses related to reading SnapshotInfo from the repo + final List syncSnapshots = Collections.synchronizedList(snapshots); + + ThrottledIterator.run( + Iterators.failFast( + snapshotIdsToIterate.iterator(), + () -> cancellableTask.isCancelled() || listeners.isFailing() + ), + (ref, snapshotId) -> { + final var refListener = ActionListener.runBefore(listeners.acquire(), ref::close); + getSnapshotInfoExecutor.getSnapshotInfo(repository, snapshotId, new ActionListener<>() { + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + if (matchesPredicates(snapshotInfo)) { + syncSnapshots.add(snapshotInfo.maybeWithoutIndices(indices)); + } + refListener.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + if (ignoreUnavailable) { + logger.warn( + Strings.format("failed to fetch snapshot 
info for [%s:%s]", repository, snapshotId), + e + ); + refListener.onResponse(null); + } else { + refListener.onFailure(e); + } + } + }); + }, + getSnapshotInfoExecutor.getMaxRunningTasks(), + () -> {}, + () -> {} + ); + } + }) + + .addListener(listener.safeMap(v -> + // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here + sortSnapshotsWithNoOffsetOrLimit(snapshots)), executor, threadPool.getThreadContext()); } private SnapshotsInRepo buildSimpleSnapshotInfos( @@ -467,6 +491,7 @@ private SnapshotsInRepo sortSnapshotsWithNoOffsetOrLimit(List snap } private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); final var resultsStream = snapshotInfoStream.filter(sortBy.getAfterPredicate(after, order)) .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); From 2120683964bd1c8852833c4086916006c1bc071d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Thu, 21 Mar 2024 10:58:29 +0100 Subject: [PATCH 080/214] [Transform] Fix `_reset` API when called with `force=true` on a failed transform (#106574) --- docs/changelog/106574.yaml | 6 ++ .../integration/TransformResetIT.java | 11 +++- .../integration/TransformRestTestCase.java | 23 ++++++++ .../integration/TransformRobustnessIT.java | 24 -------- .../TransformTaskFailedStateIT.java | 56 ++++++++++++++++++- .../action/TransportResetTransformAction.java | 9 ++- 6 files changed, 101 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/106574.yaml diff --git a/docs/changelog/106574.yaml b/docs/changelog/106574.yaml new file mode 100644 index 0000000000000..8063450bc0db1 --- /dev/null +++ b/docs/changelog/106574.yaml @@ -0,0 +1,6 @@ +pr: 106574 +summary: Fix `_reset` API when called with `force=true` on a failed transform +area: Transform +type: bug +issues: + - 106573 diff --git 
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java index dd7ad718812a1..7ed6466357e8f 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformResetIT.java @@ -32,6 +32,13 @@ public class TransformResetIT extends TransformRestTestCase { TEST_PASSWORD_SECURE_STRING ); private static final String DATA_ACCESS_ROLE = "test_data_access"; + private static final String SYNC_CONFIG = """ + "sync": { + "time": { + "field": "timestamp" + } + }, + """; private static boolean indicesCreated = false; @@ -132,6 +139,7 @@ public void testResetDeletesDestinationIndex() throws Exception { } private static String createConfig(String transformDestIndex) { + boolean isContinuous = randomBoolean(); return Strings.format(""" { "dest": { @@ -140,6 +148,7 @@ private static String createConfig(String transformDestIndex) { "source": { "index": "%s" }, + %s "pivot": { "group_by": { "reviewer": { @@ -156,6 +165,6 @@ private static String createConfig(String transformDestIndex) { } } } - }""", transformDestIndex, REVIEWS_INDEX_NAME); + }""", transformDestIndex, REVIEWS_INDEX_NAME, isContinuous ? 
SYNC_CONFIG : ""); } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index ce1178e760a6c..7c74e918a039f 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -31,6 +31,7 @@ import java.io.IOException; import java.time.Instant; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -734,4 +735,26 @@ private void logAudits() throws Exception { } }, 5, TimeUnit.SECONDS); } + + @SuppressWarnings("unchecked") + protected List getTransformTasks() throws IOException { + final Request tasksRequest = new Request("GET", "/_tasks"); + tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); + Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); + + Map nodes = (Map) tasksResponse.get("nodes"); + if (nodes == null) { + return List.of(); + } + + List foundTasks = new ArrayList<>(); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + Map tasks = (Map) nodeInfo.get("tasks"); + if (tasks != null) { + foundTasks.addAll(tasks.keySet()); + } + } + return foundTasks; + } } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java index 105ac09e356fd..e537a6f280ac0 100644 --- 
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRobustnessIT.java @@ -15,10 +15,8 @@ import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.containsString; @@ -150,28 +148,6 @@ public void testCancellingTransformTask() throws Exception { assertThat(getTransformTasks(), is(empty())); } - @SuppressWarnings("unchecked") - private List getTransformTasks() throws IOException { - final Request tasksRequest = new Request("GET", "/_tasks"); - tasksRequest.addParameter("actions", TransformField.TASK_NAME + "*"); - Map tasksResponse = entityAsMap(client().performRequest(tasksRequest)); - - Map nodes = (Map) tasksResponse.get("nodes"); - if (nodes == null) { - return List.of(); - } - - List foundTasks = new ArrayList<>(); - for (Entry node : nodes.entrySet()) { - Map nodeInfo = (Map) node.getValue(); - Map tasks = (Map) nodeInfo.get("tasks"); - if (tasks != null) { - foundTasks.addAll(tasks.keySet()); - } - } - return foundTasks; - } - private void beEvilAndDeleteTheTransformIndex() throws IOException { final Request deleteRequest = new Request("DELETE", TransformInternalIndexConstants.LATEST_INDEX_NAME); deleteRequest.setOptions( diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index 9f74d445252d2..1abf611e833c4 100644 --- 
a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -24,10 +24,12 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.matchesRegex; +import static org.hamcrest.Matchers.nullValue; public class TransformTaskFailedStateIT extends TransformRestTestCase { @@ -61,6 +63,9 @@ public void testForceStopFailedTransform() throws Exception { String transformIndex = "failure_pivot_reviews"; createDestinationIndexWithBadMapping(transformIndex); createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); @@ -72,6 +77,8 @@ public void testForceStopFailedTransform() throws Exception { // Verify we have failed for the expected reason assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat(getTransformTasks(), hasSize(1)); + // verify that we cannot stop a failed transform ResponseException ex = expectThrows(ResponseException.class, () -> stopTransform(transformId, false)); assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); @@ -90,6 +97,44 @@ public void testForceStopFailedTransform() throws Exception { awaitState(transformId, TransformStats.State.STOPPED); fullState 
= getTransformStateAndStats(transformId); assertThat(XContentMapValues.extractValue("reason", fullState), is(nullValue())); + + assertThat(getTransformTasks(), is(empty())); + } + + public void testForceResetFailedTransform() throws Exception { + String transformId = "test-force-reset-failed-transform"; + createReviewsIndex(REVIEWS_INDEX_NAME, 10, 27, "date", false, -1, null); + String transformIndex = "failure_pivot_reviews"; + createDestinationIndexWithBadMapping(transformIndex); + createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + + startTransform(transformId); + awaitState(transformId, TransformStats.State.FAILED); + Map fullState = getTransformStateAndStats(transformId); + final String failureReason = "Failed to index documents into destination index due to permanent error: " + + "\\[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced \\[7\\] " + + "failures and at least 1 irrecoverable " + + "\\[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " + + "incompatible with the transform configuration.;.*"; + // Verify we have failed for the expected reason + assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + + assertThat(getTransformTasks(), hasSize(1)); + + // verify that we cannot reset a failed transform + ResponseException ex = expectThrows(ResponseException.class, () -> resetTransform(transformId, false)); + assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); + assertThat( + (String) XContentMapValues.extractValue("error.reason", entityAsMap(ex.getResponse())), + is(equalTo("Cannot reset transform [test-force-reset-failed-transform] as the task is running. 
Stop the task first")) + ); + + // Verify that we can force reset a failed transform + resetTransform(transformId, true); + + assertThat(getTransformTasks(), is(empty())); } public void testStartFailedTransform() throws Exception { @@ -98,6 +143,9 @@ public void testStartFailedTransform() throws Exception { String transformIndex = "failure_pivot_reviews"; createDestinationIndexWithBadMapping(transformIndex); createContinuousPivotReviewsTransform(transformId, transformIndex, null); + + assertThat(getTransformTasks(), is(empty())); + startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); @@ -109,6 +157,8 @@ public void testStartFailedTransform() throws Exception { // Verify we have failed for the expected reason assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat(getTransformTasks(), hasSize(1)); + final String expectedFailure = "Unable to start transform \\[test-force-start-failed-transform\\] " + "as it is in a failed state with failure: \\[" + failureReason @@ -124,6 +174,8 @@ public void testStartFailedTransform() throws Exception { }, 60, TimeUnit.SECONDS); stopTransform(transformId, true); + + assertThat(getTransformTasks(), is(empty())); } private void awaitState(String transformId, TransformStats.State state) throws Exception { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index ee394c7a128b4..87f24ae7c2bc8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -154,7 +154,14 @@ protected void masterOperation(Task task, Request 
request, ClusterState state, A stopTransformActionListener.onResponse(null); return; } - StopTransformAction.Request stopTransformRequest = new StopTransformAction.Request(request.getId(), true, false, null, true, false); + StopTransformAction.Request stopTransformRequest = new StopTransformAction.Request( + request.getId(), + true, + request.isForce(), + null, + true, + false + ); executeAsyncWithOrigin(client, TRANSFORM_ORIGIN, StopTransformAction.INSTANCE, stopTransformRequest, stopTransformActionListener); } From 992e9b5dd75fe6194005484e4e452328f3360f4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Thu, 21 Mar 2024 11:14:18 +0100 Subject: [PATCH 081/214] [Profiling] Switch to OTEL field host.arch (#106585) --- .../component-template/profiling-hosts.json | 9 ++-- .../resources/data/profiling-hosts.ndjson | 4 +- .../xpack/profiling/CO2Calculator.java | 2 +- .../xpack/profiling/HostMetadata.java | 14 +++-- .../ProfilingIndexTemplateRegistry.java | 3 +- .../xpack/profiling/HostMetadataTests.java | 53 +++++++++++++------ .../rest-api-spec/test/profiling/10_basic.yml | 2 +- 7 files changed, 58 insertions(+), 29 deletions(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index f452682c620c4..f633a8f0cbdb5 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -29,13 +29,16 @@ "ecs.version": { "type": "keyword" }, - "host.id": { - "type": "keyword" - }, "@timestamp": { "type": "date", "format": "epoch_second" }, + "host.id": { + "type": "keyword" + }, + "host.arch": { + "type": "keyword" + }, "profiling": { "properties": { "project.id": { diff --git 
a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson index 58e8281e1d32c..e164f49c4f685 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson @@ -1,4 +1,4 @@ {"create": {"_index": "profiling-hosts","_id":"eLH27YsBj2lLi3tJYlvr"}} -{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"ec2.instance_type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"profiling.host.machine":"x86_64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"ec2.placement.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c"
,"profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} +{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"ec2.instance_type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"ec2.placement.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west
-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} {"create": {"_index": "profiling-hosts", "_id": "u_fHlYwBkmZvQ6tVo1Lr"}} -{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"azure.compute.vmsize":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.machine":"x86_64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","azure.compute.location":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.
1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} +{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"azure.compute.vmsize":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_ena
bled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","azure.compute.location":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index d681517a7ce6f..d69178f158a88 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -53,7 +53,7 @@ public double getAnnualCO2Tons(String hostID, long samples) { } private double getKiloWattsPerCore(HostMetadata host) { - return switch (host.profilingHostMachine) { + return switch 
(host.hostArchitecture) { // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions, // which require "arm64" and "amd64" to be reported as the host architecture. case "arm64", "aarch64" -> customKilowattsPerCoreARM64; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java index e9f912a3f60e5..e1e3e27e951bf 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/HostMetadata.java @@ -20,22 +20,26 @@ final class HostMetadata implements ToXContentObject { static final int DEFAULT_PROFILING_NUM_CORES = 4; final String hostID; final InstanceType instanceType; - final String profilingHostMachine; // aarch64 or x86_64 + final String hostArchitecture; // arm64 or amd64, (pre-8.14.0: aarch64 or x86_64) final int profilingNumCores; // number of cores on the profiling host machine - HostMetadata(String hostID, InstanceType instanceType, String profilingHostMachine, Integer profilingNumCores) { + HostMetadata(String hostID, InstanceType instanceType, String hostArchitecture, Integer profilingNumCores) { this.hostID = hostID; this.instanceType = instanceType; - this.profilingHostMachine = profilingHostMachine; + this.hostArchitecture = hostArchitecture; this.profilingNumCores = profilingNumCores != null ? 
profilingNumCores : DEFAULT_PROFILING_NUM_CORES; } public static HostMetadata fromSource(Map source) { if (source != null) { String hostID = (String) source.get("host.id"); - String profilingHostMachine = (String) source.get("profiling.host.machine"); + String hostArchitecture = (String) source.get("host.arch"); + if (hostArchitecture == null) { + // fallback to pre-8.14.0 field name + hostArchitecture = (String) source.get("profiling.host.machine"); + } Integer profilingNumCores = (Integer) source.get("profiling.agent.config.present_cpu_cores"); - return new HostMetadata(hostID, InstanceType.fromHostSource(source), profilingHostMachine, profilingNumCores); + return new HostMetadata(hostID, InstanceType.fromHostSource(source), hostArchitecture, profilingNumCores); } return new HostMetadata("", new InstanceType("", "", ""), "", null); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index c90e0e52c4d58..738c06fa310a9 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -46,7 +46,8 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 3: Add optional component template 'profiling-ilm@custom' to all ILM-managed index templates // version 4: Added 'service.name' keyword mapping to profiling-events // version 5: Add optional component template '@custom' to all index templates that reference component templates - public static final int INDEX_TEMPLATE_VERSION = 5; + // version 6: Added 'host.arch' keyword mapping to profiling-hosts + public static final int INDEX_TEMPLATE_VERSION = 6; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 2; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java index d8f93cd129916..de32754ed69ff 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java @@ -15,7 +15,7 @@ public class HostMetadataTests extends ESTestCase { public void testCreateFromSourceAWS() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "x86_64"; final String provider = "aws"; final String region = "eu-west-1"; final String instanceType = "md5x.large"; @@ -24,7 +24,7 @@ public void testCreateFromSourceAWS() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "ec2.instance_type", instanceType, "ec2.placement.region", region ) @@ -32,7 +32,7 @@ public void testCreateFromSourceAWS() { // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals(instanceType, host.instanceType.name); @@ -40,7 +40,7 @@ public void testCreateFromSourceAWS() { public void testCreateFromSourceGCP() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "x86_64"; final String provider = "gcp"; final String[] regions = { "", "", "europe-west1", "europewest", "europe-west1" }; final String[] zones = { @@ -58,14 +58,14 @@ public void testCreateFromSourceGCP() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", 
hostID, - "profiling.host.machine", machine, + "host.arch", arch, "gce.instance.zone", zone ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals("", host.instanceType.name); @@ -74,7 +74,7 @@ public void testCreateFromSourceGCP() { public void testCreateFromSourceGCPZoneFuzzer() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "x86_64"; final String provider = "gcp"; final Character[] chars = new Character[] { '/', '-', 'a' }; @@ -92,14 +92,14 @@ public void testCreateFromSourceGCPZoneFuzzer() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "gce.instance.zone", zone ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertNotNull(host.instanceType.region); assertEquals("", host.instanceType.name); @@ -110,7 +110,7 @@ public void testCreateFromSourceGCPZoneFuzzer() { public void testCreateFromSourceAzure() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; final String provider = "azure"; final String region = "eastus2"; @@ -118,14 +118,14 @@ public void testCreateFromSourceAzure() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "azure.compute.location", region ) ); // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); 
assertEquals("", host.instanceType.name); @@ -133,7 +133,7 @@ public void testCreateFromSourceAzure() { public void testCreateFromSourceECS() { final String hostID = "1440256254710195396"; - final String machine = "x86_64"; + final String arch = "amd64"; final String provider = "any-provider"; final String region = "any-region"; @@ -141,7 +141,7 @@ public void testCreateFromSourceECS() { HostMetadata host = HostMetadata.fromSource( Map.of( "host.id", hostID, - "profiling.host.machine", machine, + "host.arch", arch, "profiling.host.tags", Arrays.asList( "cloud_provider:"+provider, "cloud_environment:qa", "cloud_region:"+region) ) @@ -149,18 +149,39 @@ public void testCreateFromSourceECS() { // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + assertEquals(arch, host.hostArchitecture); assertEquals(provider, host.instanceType.provider); assertEquals(region, host.instanceType.region); assertEquals("", host.instanceType.name); } public void testCreateFromSourceNoProvider() { + final String hostID = "1440256254710195396"; + final String arch = "amd64"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "host.arch", arch + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals("", host.instanceType.provider); + assertEquals("", host.instanceType.region); + assertEquals("", host.instanceType.name); + } + + public void testCreateFromSourceArchitectureFallback() { final String hostID = "1440256254710195396"; final String machine = "x86_64"; // tag::noformat HostMetadata host = HostMetadata.fromSource( + // Missing host.arch field, pre-8.14.0 architecture value Map.of( "host.id", hostID, "profiling.host.machine", machine @@ -169,7 +190,7 @@ public void testCreateFromSourceNoProvider() { // end::noformat assertEquals(hostID, host.hostID); - assertEquals(machine, host.profilingHostMachine); + 
assertEquals(machine, host.hostArchitecture); assertEquals("", host.instanceType.provider); assertEquals("", host.instanceType.region); assertEquals("", host.instanceType.name); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 367655ba89388..4697141bfc599 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -116,7 +116,7 @@ setup: - {"create": {"_index": "profiling-executables", "_id": "lHp5_WAgpLy2alrUVab6HA"}} - {"@timestamp": "1698624000", "Executable": {"build": {"id": "c5f89ea1c68710d2a493bb604c343a92c4f8ddeb"}, "file": {"name": "vmlinux"}}, "Symbolization": {"next_time": "4852491791"}, "ecs": {"version": "1.12.0"}} - {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} - - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "profiling.host.machine": "x86_64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, 
"ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } + - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", 
"profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } - {"index": {"_index": "test-events"}} - {"@timestamp": "1700504427", "events": ["S07KmaoGhvNte78xwwRbZQ"]} --- From 32dbc28e82728a01f6c2dfeeb5473e1f16e1f88d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 21 Mar 2024 11:32:57 +0100 Subject: [PATCH 082/214] [DOCS] Adds disclaimer to semantic search tutorials (#106590) --- .../search/search-your-data/semantic-search-elser.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc index 42c9ac4fb4357..c3eefec86e6f3 100644 --- a/docs/reference/search/search-your-data/semantic-search-elser.asciidoc +++ 
b/docs/reference/search/search-your-data/semantic-search-elser.asciidoc @@ -135,6 +135,11 @@ a list of relevant text passages. All unique passages, along with their IDs, have been extracted from that data set and compiled into a https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. +IMPORTANT: The `msmarco-passagetest2019-top1000` dataset was not utilized to +train the model. It is only used in this tutorial as a sample dataset that is +easily accessible for demonstration purposes. You can use a different data set +to test the workflow and become familiar with it. + Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] in the {ml-app} UI. Assign the name `id` to the first column and `content` to From 8a7697bdc962e4265ad9248ad80adb26086517cc Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 21 Mar 2024 11:37:34 +0100 Subject: [PATCH 083/214] Improve short-circuiting downsample execution (#106563) This is relevant in the case multiple downsample api invocations have been executed for the same source index, target index and fixed interval. Whether the target index is ready, is now also checked just before starting the downsample persistent tasks.
Relates to #106403 --- docs/changelog/106563.yaml | 5 ++ .../downsample/TransportDownsampleAction.java | 76 ++++++++++++------- 2 files changed, 55 insertions(+), 26 deletions(-) create mode 100644 docs/changelog/106563.yaml diff --git a/docs/changelog/106563.yaml b/docs/changelog/106563.yaml new file mode 100644 index 0000000000000..79476f909a04c --- /dev/null +++ b/docs/changelog/106563.yaml @@ -0,0 +1,5 @@ +pr: 106563 +summary: Improve short-circuiting downsample execution +area: TSDB +type: enhancement +issues: [] diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 5debe5d2edfc9..0570d93441be1 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -234,33 +234,11 @@ protected void masterOperation( } final TaskId parentTask = new TaskId(clusterService.localNode().getId(), task.getId()); - // Shortcircuit if target index has been downsampled: + // Short circuit if target index has been downsampled: final String downsampleIndexName = request.getTargetIndex(); - IndexMetadata downsampleIndex = state.getMetadata().index(downsampleIndexName); - if (downsampleIndex != null) { - var downsampleStatus = IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(downsampleIndex.getSettings()); - if (downsampleStatus == DownsampleTaskStatus.UNKNOWN) { - // This isn't a downsample index, so fail: - listener.onFailure(new ResourceAlreadyExistsException(downsampleIndex.getIndex())); - return; - } else if (downsampleStatus == DownsampleTaskStatus.SUCCESS) { - listener.onResponse(AcknowledgedResponse.TRUE); - return; - } - // In case the write block has been set on the target index means that the shard level downsampling itself was successful, - 
// but the previous invocation failed later performing settings update, refresh or force merge. - // The write block is used a signal to resume from the refresh part of the downsample api invocation. - if (downsampleIndex.getSettings().get(IndexMetadata.SETTING_BLOCKS_WRITE) != null) { - var refreshRequest = new RefreshRequest(downsampleIndexName); - refreshRequest.setParentTask(parentTask); - client.admin() - .indices() - .refresh( - refreshRequest, - new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, request.getWaitTimeout()) - ); - return; - } + if (canShortCircuit(downsampleIndexName, parentTask, request.getWaitTimeout(), state.metadata(), listener)) { + logger.info("Skipping downsampling, because a previous execution already completed downsampling"); + return; } try { MetadataCreateIndexService.validateIndexName(downsampleIndexName, state); @@ -356,6 +334,11 @@ protected void masterOperation( } }, e -> { if (e instanceof ResourceAlreadyExistsException) { + var metadata = clusterService.state().metadata(); + if (canShortCircuit(request.getTargetIndex(), parentTask, request.getWaitTimeout(), metadata, listener)) { + logger.info("Downsample tasks are not created, because a previous execution already completed downsampling"); + return; + } performShardDownsampling( request, delegate, @@ -374,6 +357,47 @@ protected void masterOperation( })); } + /** + * Shortcircuit when another downsample api invocation already completed successfully. 
+ */ + private boolean canShortCircuit( + String targetIndexName, + TaskId parentTask, + TimeValue waitTimeout, + Metadata metadata, + ActionListener listener + ) { + IndexMetadata targetIndexMetadata = metadata.index(targetIndexName); + if (targetIndexMetadata == null) { + return false; + } + + var downsampleStatus = IndexMetadata.INDEX_DOWNSAMPLE_STATUS.get(targetIndexMetadata.getSettings()); + if (downsampleStatus == DownsampleTaskStatus.UNKNOWN) { + // This isn't a downsample index, so fail: + listener.onFailure(new ResourceAlreadyExistsException(targetIndexMetadata.getIndex())); + return true; + } else if (downsampleStatus == DownsampleTaskStatus.SUCCESS) { + listener.onResponse(AcknowledgedResponse.TRUE); + return true; + } + // In case the write block has been set on the target index, it means that the shard level downsampling itself was successful, + // but the previous invocation failed later performing settings update, refresh or force merge. + // The write block is used as a signal to resume from the refresh part of the downsample api invocation. + if (targetIndexMetadata.getSettings().get(IndexMetadata.SETTING_BLOCKS_WRITE) != null) { + var refreshRequest = new RefreshRequest(targetIndexMetadata.getIndex().getName()); + refreshRequest.setParentTask(parentTask); + client.admin() + .indices() + .refresh( + refreshRequest, + new RefreshDownsampleIndexActionListener(listener, parentTask, targetIndexMetadata.getIndex().getName(), waitTimeout) + ); + return true; + } + return false; + } + // 3. downsample index created or already exist (in case of retry). Run downsample indexer persistent task on each shard.
private void performShardDownsampling( DownsampleAction.Request request, From e1e14e45e445f68aa1471193b0fb04a174de1acf Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 21 Mar 2024 22:29:20 +1100 Subject: [PATCH 084/214] Finer grained blobStore stats report in stateless (#106581) When stateless is enabled, the blobStore stats report keeps the information about operation purposes by using combined keys of operation purpose and operation. Resolves: ES-7967 --- .../s3/S3BlobStoreRepositoryTests.java | 21 +++++++++++++++++ .../repositories/s3/S3BlobStore.java | 23 ++++++++++++++----- .../repositories/s3/S3Service.java | 3 +++ 3 files changed, 41 insertions(+), 6 deletions(-) diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 4080a47c7dabe..94cfce5357857 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -344,6 +344,27 @@ public void testRequestStatsWithOperationPurposes() throws IOException { assertThat(newStats.keySet(), equalTo(allOperations)); assertThat(newStats, not(equalTo(initialStats))); + // Exercise stats report that keeps fine-grained information + final Map fineStats = statsCollectors.statsMap(true); + assertThat( + fineStats.keySet(), + equalTo( + statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::toString).collect(Collectors.toUnmodifiableSet()) + ) + ); + // fine stats are equal to coarse grained stats (without entries with value 0) by aggregation + assertThat( + fineStats.entrySet() + .stream() + .collect(Collectors.groupingBy(entry -> entry.getKey().split("_", 2)[1], Collectors.summingLong(Map.Entry::getValue))), + equalTo( 
newStats.entrySet() + .stream() + .filter(entry -> entry.getValue() != 0L) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)) + ) + ); + final Set operationsSeenForTheNewPurpose = statsCollectors.collectors.keySet() .stream() .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT_METADATA) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 6b9937b01a433..895f5273dbba0 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -376,7 +376,7 @@ public void close() throws IOException { @Override public Map stats() { - return statsCollectors.statsMap(); + return statsCollectors.statsMap(service.isStateless); } // Package private for testing @@ -461,7 +461,12 @@ static Operation parse(String s) { } } - record StatsKey(Operation operation, OperationPurpose purpose) {} + record StatsKey(Operation operation, OperationPurpose purpose) { + @Override + public String toString() { + return purpose.getKey() + "_" + operation.getKey(); + } + } class StatsCollectors { final Map collectors = new ConcurrentHashMap<>(); @@ -470,10 +475,16 @@ RequestMetricCollector getMetricCollector(Operation operation, OperationPurpose return collectors.computeIfAbsent(new StatsKey(operation, purpose), k -> buildMetricCollector(k.operation(), k.purpose())); } - Map statsMap() { - final Map m = Arrays.stream(Operation.values()).collect(Collectors.toMap(Operation::getKey, e -> 0L)); - collectors.forEach((sk, v) -> m.compute(sk.operation().getKey(), (k, c) -> Objects.requireNonNull(c) + v.counter.sum())); - return Map.copyOf(m); + Map statsMap(boolean isStateless) { + if (isStateless) { + return collectors.entrySet() + .stream() + .collect(Collectors.toUnmodifiableMap(entry -> 
entry.getKey().toString(), entry -> entry.getValue().counter.sum())); + } else { + final Map m = Arrays.stream(Operation.values()).collect(Collectors.toMap(Operation::getKey, e -> 0L)); + collectors.forEach((sk, v) -> m.compute(sk.operation().getKey(), (k, c) -> Objects.requireNonNull(c) + v.counter.sum())); + return Map.copyOf(m); + } } IgnoreNoResponseMetricsCollector buildMetricCollector(Operation operation, OperationPurpose purpose) { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index fc58482651fa3..c8a7cc12a90f4 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService; import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -92,6 +93,7 @@ class S3Service implements Closeable { final TimeValue compareAndExchangeTimeToLive; final TimeValue compareAndExchangeAntiContentionDelay; + final boolean isStateless; S3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { webIdentityTokenCredentialsProvider = new CustomWebIdentityTokenCredentialsProvider( @@ -103,6 +105,7 @@ class S3Service implements Closeable { ); compareAndExchangeTimeToLive = REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); + isStateless = DiscoveryNode.isStateless(nodeSettings); } /** From ffec5f6fced975608d3ea41b0bef79e7be4ab0e3 Mon 
Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 21 Mar 2024 13:17:19 +0100 Subject: [PATCH 085/214] Fix synthesizing id bug when reading routing value (#106600) Take `routingHashBytes` offset and length into account when decoding the routing hash as part of synthesizing the _id. --- .../java/org/elasticsearch/index/mapper/IdLoader.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java index ef15af93f6e34..b8e52667894bb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdLoader.java @@ -93,6 +93,9 @@ public IdLoader.Leaf leaf(LeafStoredFieldLoader loader, LeafReader reader, int[] // Each document always has exactly one tsid and one timestamp: SortedDocValues tsIdDocValues = DocValues.getSorted(reader, TimeSeriesIdFieldMapper.NAME); SortedNumericDocValues timestampDocValues = DocValues.getSortedNumeric(reader, DataStream.TIMESTAMP_FIELD_NAME); + SortedDocValues routingHashDocValues = builders == null + ? 
DocValues.getSorted(reader, TimeSeriesRoutingHashFieldMapper.NAME) + : null; for (int i = 0; i < docIdsInLeaf.length; i++) { int docId = docIdsInLeaf[i]; @@ -107,11 +110,12 @@ public IdLoader.Leaf leaf(LeafStoredFieldLoader loader, LeafReader reader, int[] var routingBuilder = builders[i]; ids[i] = TsidExtractingIdFieldMapper.createId(false, routingBuilder, tsid, timestamp, new byte[16]); } else { - SortedDocValues routingHashDocValues = DocValues.getSorted(reader, TimeSeriesRoutingHashFieldMapper.NAME); found = routingHashDocValues.advanceExact(docId); assert found; BytesRef routingHashBytes = routingHashDocValues.lookupOrd(routingHashDocValues.ordValue()); - int routingHash = TimeSeriesRoutingHashFieldMapper.decode(Uid.decodeId(routingHashBytes.bytes)); + int routingHash = TimeSeriesRoutingHashFieldMapper.decode( + Uid.decodeId(routingHashBytes.bytes, routingHashBytes.offset, routingHashBytes.length) + ); ids[i] = TsidExtractingIdFieldMapper.createId(routingHash, tsid, timestamp); } } From 999dcb8d08befdd94d149e06e806c6517d58b193 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Thu, 21 Mar 2024 13:35:17 +0100 Subject: [PATCH 086/214] Unable to retrieve multiple stored field values (#106575) The issue happens when we try to use multiple stored fields through the FetchFieldsPhase, which we do when using `_fields` since we have a single shared instance of SingleFieldsVisitor per field and document and use a shared `currentValues` array. 
--- docs/changelog/106575.yaml | 5 ++ .../test/runtime_fields/10_keyword.yml | 70 +++++++++++++++++ .../lookup/LeafFieldLookupProvider.java | 21 ++--- .../subphase/PopulateFieldLookupTests.java | 77 +++++++++++++++++++ 4 files changed, 159 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/106575.yaml create mode 100644 server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java diff --git a/docs/changelog/106575.yaml b/docs/changelog/106575.yaml new file mode 100644 index 0000000000000..fb5230a9edb3d --- /dev/null +++ b/docs/changelog/106575.yaml @@ -0,0 +1,5 @@ +pr: 106575 +summary: Unable to retrieve multiple stored field values +area: "Search" +type: bug +issues: [] diff --git a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml index bd5c45823aaae..e8bf1f84e6437 100644 --- a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml +++ b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml @@ -108,6 +108,76 @@ setup: - match: {hits.hits.0.fields.day_of_week_letters: [T, a, d, h, r, s, u, y] } - match: {hits.hits.0.fields.prefixed_node: [node_c] } +--- +"fetch multiple stored fields": + - skip: + version: " - 8.13.99" + reason: "bug fixed in 8.14" + + - do: + indices.create: + index: sensor-test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + runtime: + prefixed_node: + type: keyword + script: + source: | + for (String node : params._fields.node.values) { + emit(params.prefix + node); + } + params: + prefix: node_ + prefixed_region: + type: keyword + script: + source: | + for (String region : params._fields.region.values) { + emit(params.prefix + region) + } + params: + prefix: us- + properties: + timestamp: + type: date + 
node: + type: keyword + store: true + region: + type: keyword + store: true + + - do: + bulk: + index: sensor-test + refresh: true + body: | + {"index":{}} + {"timestamp": 1516729294000, "node": "a", "region": "west-1" } + {"index":{}} + {"timestamp": 1516642894000, "node": "b", "region": "west-2" } + {"index":{}} + {"timestamp": 1516556494000, "node": "a", "region": "west-1"} + {"index":{}} + {"timestamp": 1516470094000, "node": "b", "region": "west-2"} + {"index":{}} + {"timestamp": 1516383694000, "node": "c", "region": "west-2"} + {"index":{}} + {"timestamp": 1516297294000, "node": "c", "region": "west-2"} + - do: + search: + index: sensor-test + body: + sort: timestamp + fields: [prefixed_node, prefixed_region] + - match: {hits.total.value: 6} + - match: {hits.hits.0.fields.prefixed_node: [node_c] } + - match: {hits.hits.0.fields.prefixed_region: [us-west-2]} + --- "docvalue_fields": - do: diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldLookupProvider.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldLookupProvider.java index a17bd1f2d26e5..44b317023663f 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldLookupProvider.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldLookupProvider.java @@ -34,27 +34,20 @@ static Function fromStoredFields() { return ctx -> new LeafFieldLookupProvider() { StoredFields storedFields; - int currentDoc = -1; - final List currentValues = new ArrayList<>(2); @Override public void populateFieldLookup(FieldLookup fieldLookup, int doc) throws IOException { if (storedFields == null) { storedFields = ctx.reader().storedFields(); } - if (doc == currentDoc) { - fieldLookup.setValues(currentValues); - } else { - currentDoc = doc; - currentValues.clear(); - // TODO can we remember which fields have been loaded here and get them eagerly next time? 
- // likelihood is if a script is loading several fields on one doc they will load the same - // set of fields next time round - SingleFieldsVisitor visitor = new SingleFieldsVisitor(fieldLookup.fieldType(), currentValues); - storedFields.document(doc, visitor); - fieldLookup.setValues(currentValues); - } + // TODO can we remember which fields have been loaded here and get them eagerly next time? + // likelihood is if a script is loading several fields on one doc they will load the same + // set of fields next time round + final List currentValues = new ArrayList<>(2); + storedFields.document(doc, new SingleFieldsVisitor(fieldLookup.fieldType(), currentValues)); + fieldLookup.setValues(currentValues); } + }; } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java new file mode 100644 index 0000000000000..f9e0fcf114cdc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/PopulateFieldLookupTests.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperServiceTestCase; +import org.elasticsearch.search.lookup.FieldLookup; +import org.elasticsearch.search.lookup.LeafFieldLookupProvider; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.List; + +public class PopulateFieldLookupTests extends MapperServiceTestCase { + public void testPopulateFieldLookup() throws IOException { + final XContentBuilder mapping = createMapping(); + final MapperService mapperService = createMapperService(mapping); + withLuceneIndex(mapperService, iw -> { + final Document doc = new Document(); + doc.add(new StoredField("integer", 101)); + doc.add(new StoredField("keyword", new BytesRef("foobar"))); + iw.addDocument(doc); + }, reader -> { + final StoredFields storedFields = reader.storedFields(); + final Document document = storedFields.document(0); + final List documentFields = document.getFields().stream().map(IndexableField::name).toList(); + assertThat(documentFields, Matchers.containsInAnyOrder("integer", "keyword")); + + final IndexSearcher searcher = newSearcher(reader); + final LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0); + final LeafFieldLookupProvider provider = LeafFieldLookupProvider.fromStoredFields().apply(readerContext); + final FieldLookup integerFieldLookup = new FieldLookup(mapperService.fieldType("integer")); + final FieldLookup keywordFieldLookup = new 
FieldLookup(mapperService.fieldType("keyword")); + provider.populateFieldLookup(integerFieldLookup, 0); + provider.populateFieldLookup(keywordFieldLookup, 0); + assertEquals(List.of(101), integerFieldLookup.getValues()); + assertEquals(List.of("foobar"), keywordFieldLookup.getValues()); + }); + } + + private static XContentBuilder createMapping() throws IOException { + final XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc"); + { + mapping.startObject("properties"); + { + mapping.startObject("integer"); + { + mapping.field("type", "integer").field("store", "true"); + } + mapping.endObject(); + mapping.startObject("keyword"); + { + mapping.field("type", "keyword").field("store", "true"); + } + mapping.endObject(); + } + mapping.endObject(); + + } + return mapping.endObject().endObject(); + } +} From e92a7a71680c333cb791fb95b07d8232272a0783 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 21 Mar 2024 14:33:10 +0100 Subject: [PATCH 087/214] [DOCS] Adds note to inference tutorial about similarity (#106567) --- .../inference-api/infer-api-task.asciidoc | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc index b39554bb2c83f..7c6e750138c1e 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -22,6 +22,13 @@ key. <3> The name of the embedding model to use. You can find the list of Cohere embedding models https://docs.cohere.com/reference/embed[here]. +NOTE: When using this model the recommended similarity measure to use in the +`dense_vector` field mapping is `dot_product`. In the case of Cohere models, the +embeddings are normalized to unit length in which case the `dot_product` and +the `cosine` measures are equivalent. 
+ + + // end::cohere[] @@ -35,8 +42,6 @@ PUT _inference/text_embedding/openai_embeddings <1> "service_settings": { "api_key": "", <2> "model_id": "text-embedding-ada-002" <3> - }, - "task_settings": { } } ------------------------------------------------------------ @@ -51,4 +56,9 @@ key. embedding models https://platform.openai.com/docs/guides/embeddings/embedding-models[here]. +NOTE: When using this model the recommended similarity measure to use in the +`dense_vector` field mapping is `dot_product`. In the case of OpenAI models, the +embeddings are normalized to unit length in which case the `dot_product` and +the `cosine` measures are equivalent. + // end::openai[] \ No newline at end of file From 26f726917b12ec456d693d42a43469035cb704a9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Mar 2024 09:46:23 -0400 Subject: [PATCH 088/214] ESQL: Escape regexes in tests (#106603) When we're generating raw strings to match we want to make sure it just matches exactly the string - so we should escape it. If we don't we'll get weird failures from syntax exceptions. 
Closes #106533 Closes #106538 --- .../function/scalar/string/RLikeTests.java | 43 ++++++++++--------- .../scalar/string/WildcardLikeTests.java | 7 ++- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 0e0fdc7cc47d6..da8af4e57636c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; @@ -25,13 +24,13 @@ import java.util.ArrayList; import java.util.List; +import java.util.function.Function; import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106533") public class RLikeTests extends AbstractFunctionTestCase { public RLikeTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -39,10 +38,15 @@ public RLikeTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - return parameters(() -> randomAlphaOfLength(1) + "?"); + return parameters(str -> { + for (String syntax : new String[] { "\\", ".", "?", "+", "*", "|", "{", "}", "[", "]", "(", ")", "\"", "<", ">" }) { + str = str.replace(syntax, 
"\\" + syntax); + } + return str; + }, () -> randomAlphaOfLength(1) + "?"); } - static Iterable parameters(Supplier optionalPattern) { + static Iterable parameters(Function escapeString, Supplier optionalPattern) { List cases = new ArrayList<>(); cases.add( new TestCaseSupplier( @@ -60,12 +64,12 @@ static Iterable parameters(Supplier optionalPattern) { ) ) ); - casesForString(cases, "empty string", () -> "", false, optionalPattern); - casesForString(cases, "single ascii character", () -> randomAlphaOfLength(1), true, optionalPattern); - casesForString(cases, "ascii string", () -> randomAlphaOfLengthBetween(2, 100), true, optionalPattern); - casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, optionalPattern); - casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, optionalPattern); - casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, optionalPattern); + casesForString(cases, "empty string", () -> "", false, escapeString, optionalPattern); + casesForString(cases, "single ascii character", () -> randomAlphaOfLength(1), true, escapeString, optionalPattern); + casesForString(cases, "ascii string", () -> randomAlphaOfLengthBetween(2, 100), true, escapeString, optionalPattern); + casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, escapeString, optionalPattern); + casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, escapeString, optionalPattern); + casesForString(cases, "100 random code points", () -> randomUnicodeOfCodepointLength(100), true, escapeString, optionalPattern); for (DataType type : EsqlDataTypes.types()) { if (type == DataTypes.KEYWORD || type == DataTypes.TEXT || type == DataTypes.NULL) { continue; @@ -98,24 +102,25 @@ private static void casesForString( String title, Supplier textSupplier, boolean canGenerateDifferent, + Function escapeString, Supplier optionalPattern ) { cases(cases, title + " matches self", () -> { String text = textSupplier.get(); - 
return new TextAndPattern(text, text); + return new TextAndPattern(text, escapeString.apply(text)); }, true); cases(cases, title + " doesn't match self with trailing", () -> { String text = textSupplier.get(); - return new TextAndPattern(text, text + randomAlphaOfLength(1)); + return new TextAndPattern(text, escapeString.apply(text) + randomAlphaOfLength(1)); }, false); cases(cases, title + " matches self with optional trailing", () -> { String text = randomAlphaOfLength(1); - return new TextAndPattern(text, text + optionalPattern.get()); + return new TextAndPattern(text, escapeString.apply(text) + optionalPattern.get()); }, true); if (canGenerateDifferent) { cases(cases, title + " doesn't match different", () -> { String text = textSupplier.get(); - String different = randomValueOtherThan(text, textSupplier); + String different = escapeString.apply(randomValueOtherThan(text, textSupplier)); return new TextAndPattern(text, different); }, false); } @@ -149,11 +154,9 @@ protected Expression build(Source source, List args) { Expression expression = args.get(0); Literal pattern = (Literal) args.get(1); Literal caseInsensitive = (Literal) args.get(2); - return new RLike( - source, - expression, - new RLikePattern(((BytesRef) pattern.fold()).utf8ToString()), - (Boolean) caseInsensitive.fold() - ); + String patternString = ((BytesRef) pattern.fold()).utf8ToString(); + boolean caseInsensitiveBool = (boolean) caseInsensitive.fold(); + logger.info("pattern={} caseInsensitive={}", patternString, caseInsensitiveBool); + return new RLike(source, expression, new RLikePattern(patternString), caseInsensitiveBool); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index e70a57cfd5f0e..6377be0655614 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -31,7 +31,12 @@ public WildcardLikeTests(@Name("TestCase") Supplier t @ParametersFactory public static Iterable parameters() { - return RLikeTests.parameters(() -> "*"); + return RLikeTests.parameters(str -> { + for (String syntax : new String[] { "\\", ".", "*" }) { + str = str.replace(syntax, "\\" + syntax); + } + return str; + }, () -> "*"); } @Override From dff4fd46a5f7642767ef64e9f803dc69e8a775fd Mon Sep 17 00:00:00 2001 From: Rassyan Date: Thu, 21 Mar 2024 21:50:14 +0800 Subject: [PATCH 089/214] Log AffixSetting update when using addAffixMapUpdateConsumer (#97072) --- docs/changelog/97072.yaml | 5 ++++ .../common/settings/Setting.java | 24 ++++++++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/97072.yaml diff --git a/docs/changelog/97072.yaml b/docs/changelog/97072.yaml new file mode 100644 index 0000000000000..686b30952b646 --- /dev/null +++ b/docs/changelog/97072.yaml @@ -0,0 +1,5 @@ +pr: 97072 +summary: Log when update AffixSetting using addAffixMapUpdateConsumer +area: Infra/Logging +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index aa1c25a3f1952..aaedf0f8d8874 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -626,6 +626,13 @@ String innerGetRaw(final Settings settings) { return defaultValue.apply(settings); } + /** + * Returns the raw (string) settings value, which is for logging use + */ + String getLogString(final Settings settings) { + return getRaw(settings); + } + /** Logs a deprecation warning if the setting is deprecated 
and used. */ void checkDeprecation(Settings settings) { // They're using the setting, so we need to tell them to stop @@ -989,6 +996,7 @@ public Map getValue(Settings current, Settings previous) { @Override public void apply(Map value, Settings current, Settings previous) { + Setting.logSettingUpdate(AffixSetting.this, current, previous, logger); consumer.accept(value); } }; @@ -1008,6 +1016,20 @@ public String innerGetRaw(final Settings settings) { ); } + @Override + String getLogString(final Settings settings) { + Settings filteredAffixSetting = settings.filter(this::match); + try { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + filteredAffixSetting.toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); + builder.endObject(); + return Strings.toString(builder); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + @Override public Setting getConcreteSetting(String key) { if (match(key)) { @@ -1811,7 +1833,7 @@ static void logSettingUpdate(Setting setting, Settings current, Settings prev if (setting.isFiltered()) { logger.info("updating [{}]", setting.key); } else { - logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getRaw(previous), setting.getRaw(current)); + logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getLogString(previous), setting.getLogString(current)); } } } From 11a4e4029a88714717f842ab74aba054aff4977e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 21 Mar 2024 14:57:10 +0100 Subject: [PATCH 090/214] [DOCS] Adjusts PUT inference API docs examples (#106604) Co-authored-by: David Kyle --- .../inference/put-inference.asciidoc | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 0aeb478dcafd2..9d5e187f5994a 100644 --- 
a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -163,6 +163,12 @@ creating the {infer} model, you cannot change the associated API key. If you want to use a different API key, delete the {infer} model and recreate it with the same name and the updated API key. +`model_id`::: +(Optional, string) +The name of the model to use for the {infer} task. Refer to the +https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] +for the list of available text embedding models. + `organization_id`::: (Optional, string) The unique identifier of your organization. You can find the Organization ID in @@ -216,13 +222,6 @@ Valid values are: * `search`: use it for storing embeddings of search queries run against a vector data base to find relevant documents. -`model`::: -(Optional, string) -For `openai` sevice only. The name of the model to use for the {infer} task. Refer -to the -https://platform.openai.com/docs/guides/embeddings/what-are-embeddings[OpenAI documentation] -for the list of available text embedding models. - `truncate`::: (Optional, string) For `cohere` service only. 
Specifies how the API handles inputs longer than the @@ -257,7 +256,7 @@ PUT _inference/text_embedding/cohere-embeddings "service": "cohere", "service_settings": { "api_key": "", - "model": "embed-english-light-v3.0", + "model_id": "embed-english-light-v3.0", "embedding_type": "byte" } } @@ -305,8 +304,7 @@ PUT _inference/sparse_embedding/my-elser-model "service_settings": { "num_allocations": 1, "num_threads": 1 - }, - "task_settings": {} + } } ------------------------------------------------------------ // TEST[skip:TBD] @@ -398,10 +396,8 @@ PUT _inference/text_embedding/openai_embeddings { "service": "openai", "service_settings": { - "api_key": "" - }, - "task_settings": { - "model": "text-embedding-ada-002" + "api_key": "", + "model_id": "text-embedding-ada-002" } } ------------------------------------------------------------ From ad4738265fc933d9d62d13488b6d1a39414687cc Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 21 Mar 2024 15:06:54 +0100 Subject: [PATCH 091/214] Wait in SimpleThreadPoolIT.testThreadPoolMetrics for threads to complete (#106580) Search threads might still be active even after returning a response. Waiting for all threads to be completed prevents a race between the collection of threadpool stats and measurements. 
--- .../threadpool/SimpleThreadPoolIT.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index d987db5b5466f..50d5dbdeca71b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -39,7 +39,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesRegex; @@ -151,6 +150,8 @@ public void testThreadPoolMetrics() throws Exception { assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); } final var tp = internalCluster().getInstance(ThreadPool.class, dataNodeName); + // wait for all threads to complete so that we get deterministic results + waitUntil(() -> tp.stats().stats().stream().allMatch(s -> s.active() == 0)); ThreadPoolStats tps = tp.stats(); plugin.collect(); ArrayList registeredMetrics = plugin.getRegisteredMetrics(InstrumentType.LONG_GAUGE); @@ -163,13 +164,13 @@ public void testThreadPoolMetrics() throws Exception { Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_CURRENT, (long) stats.threads()), Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_LARGEST, (long) stats.largest()), Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_QUEUE, (long) stats.queue()) - ).stream().collect(toUnmodifiableSortedMap(Entry::getKey, Entry::getValue)); + ).stream().collect(toUnmodifiableSortedMap(e -> stats.name() + e.getKey(), Entry::getValue)); Function> measurementExtractor = name -> { - String metricName = 
ThreadPool.THREAD_POOL_METRIC_PREFIX + stats.name() + name; + String metricName = ThreadPool.THREAD_POOL_METRIC_PREFIX + name; assertThat(metricName, in(registeredMetrics)); - List measurements = name.equals(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED) + List measurements = name.endsWith(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED) ? plugin.getLongAsyncCounterMeasurement(metricName) : plugin.getLongGaugeMeasurement(metricName); return measurements.stream().map(Measurement::getLong).toList(); @@ -182,9 +183,7 @@ public void testThreadPoolMetrics() throws Exception { logger.info("Stats of `{}`: {}", stats.name(), threadPoolStats); logger.info("Measurements of `{}`: {}", stats.name(), measurements); - threadPoolStats.forEach( - (metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(greaterThanOrEqualTo(value)))) - ); + threadPoolStats.forEach((metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(equalTo(value))))); }); } From dcc3d830eaea1a1c83dec818d367f3b00a3b083b Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Mar 2024 07:20:56 -0700 Subject: [PATCH 092/214] [DOCS] Unhealthy transform rule check (#106521) --- .../images/transform-alert-actions.png | Bin 195127 -> 198607 bytes .../images/transform-check-config.png | Bin 197175 -> 154179 bytes .../transform/images/transform-rule.png | Bin 81389 -> 83160 bytes .../transform/transform-alerts.asciidoc | 29 ++++++++++++------ 4 files changed, 19 insertions(+), 10 deletions(-) diff --git a/docs/reference/transform/images/transform-alert-actions.png b/docs/reference/transform/images/transform-alert-actions.png index a78c02fa305cd0a09a91358d8e63ac283b821d81..e171a5c60cbfb02929222d4928b1fc4f026bf6b8 100644 GIT binary patch literal 198607 zcmeFZXIN8Rw>GLESP)PV5Rk4Q(v%2D2a#T-cT`H~QF;hPGnKO41{1TADkDpxc zQ39xc^APy{I(WH?y2Vg~j+f`z$7VPAj4bBm z<`Wwb@t1Gw`b2;eMjR$*wjS5a&=b2{0TnwrIJJmbpQsS36Io5~d)WaO%R~pHECc4v 
zG$OCH4SR&`Ml9DEh(Q$S8Xx?k{+3Y3Uhf{*`~?_#xb`l>?Bz>4H@oU2SN~fcmn`-r zkSrND9^DsUbAB6Iyl{;=G32`%n}Cr0{gJ#I$szGVPa+q~;x5MJFK{FO!lkijbe)Y2 zoc5Tz21TX(mCZMBSxqIJA2Xar+61uyg*eMLV_eTS8?N+dp)&&~nz(J9C$EeBsR5 zDAzORC`V^0A9l*;%o(a2s{i@r+*l6vzmKthpA6)`NBK+TGg^;TOajktpw90Caz;tV(O=9z+G#=N2c9kQcbsFUH8~SZwde^Q|UEq0j=C`l!x8juwtV|xA zIeYGk@^4=nuU@fppZn99b3d=}Izpg{o|My zigwukG@LRI!&xe~mrtKC|8bU7Zo#yFnEbgbZ&hEtvbt$uqV}h`P#z8T$Kn2e`)GGA zs9>&IGjy3cskOOg|3+QyR59i{ zyArb%G)@eRCo={|DQ)%};QrqKmjP1F zO4p=$JwocZe_ z6Ch9G*-t?>Hx2$j#DMS?{&?gp*CEJD7ELA*b&?7M`?)iudqS*&gck*qx_i)rsHeEW ztlepxec#V69Y=JNU$xt~+i=Sz=02T#?UX$A82-GaY0*6YO)HG^YP}M>q}4>FQbYV# zI5t$U>BVdN5$lJQE!EA|$x30NjmvALc`xR_d10FIG4V>L$D?RC2MrxdvalFlAnK;~ zTg3ZcT%le~yDJBqsQhX+)$tP3fsMA#rTyi?A5Szm*{Zlf&xWRI2Vb{5YRqdB3flUzJLiN9b6#01O~k}-gr%|pocpYN$|uS!g-gvV zrOd1c>PqfVMo;QwRMIWUAOeu`3eJTVae4~BSxHkb+=kUKb3@a5@8z}BY6XG93#DifMZfP<@|q{Ui=}~WT((rgWMcx^6QTeZuWWEtfK}} zMQC19imI`yg{ldB#ie3`_9R_jPNapZd1Ys#+$-C8LG62Xy10Ula^Z>0B_9_Mi6BsRt3JU(lGkpo~;)`)wl&kzFbgZXn9}fHj!L<8t}&D#jQJf zZ+-j>H-R&?mHBG@QoBEgp53R(tVM{opWHRK4JqGmIZ;0(;mgxgl2`l13uSNzBmI@% zgS`Olu-mO(4VD#nKjQ(+nZvPBr;)ss(mJ^C%RHO$} z{5{g~ZiOLPdOgKs|N1(`+G`}vP65HYY%^_JrJ>`LNq)&@(fu%Ak1V%_|A>)Bvsb|aFJU34 z)RxtbuJU@-Wn1=PWYJ0cFk@YEs<^qBn}V(3vtJw_wer-WODfRL$X0g3m2#_W(U2gl z*rY8)@4Iz`d3u+CGjN+N;K_Lvn%~?<(=FVF$#4|s8L~aE5k9p!W4oxx)9y0-dE_L~ zKip3vhMj+yMJa?(H(wpv*6!vL z;pj*|rs`6_p|7*KdZHp}YdDHKxg&zMH%E|*)%F#S)yTBV{AvcMAPU$>+7xRuG!r4| zMYb#IdbKX8H=eegCV_o`+PMqtO@7~`f3|W;wkVK(!*22ERE+{T1$5MpwddjBdIe(U z*ELy^3np%lx?ghYU_OeUy5Da&LW`+e8_1+wYSWm=)LUwl|NgSX*?O+YWAQIpVrS;` znjRH|fcIKa^|Pcc(YOF>q~~Jq+}AwXQT31q-_n-MYhA7uLbAxjIclP8O%op{z6RO$ z)?~xFARK8>s2g8e(PSeV&}1UkT$a>wib0L6AFT(kqXw4x&8PHmwji09>t|YPRUtwq zMTVLg&%56LQ1(F_C)zD2om>bt%dqFz{SCJ91YB55e>Jmm-+{XFEp0fvw5(X(@&2d* z-83UM?cTHJfzamD&K7R92rre@<%}S$&d8o4mUWMs98-14xmUP5Kcf+^1Ib-Pf2*aN&^&uqx z;c&5X>YO3c?PaVy@t95mxW%}OR61?w@>?5Y30d_}rH@pn8*HSX z{taP=+Xxxnlg=E+!NS;f9?WDew3igpIzEX1nYz#R{AlPfNniH9?amRQ1F0{!_hH$n 
z$+y_ovxO~=0GVpeFhL~@mUmeKHPc4&`D5Xxr8QYOCWiT<*7ce)fISiDqBPSWsv?N6 zhCvshfSE6wR!VYJw9ErHm3Tbg7}9^0^qhH|{=iiPnC3HSZLRg2qrXyKsV%=wxS=2r z=x3mCn|{FKaJ|yEa)HXLzxJ|94Mxi2LbjS&xz)Dk+j^{HXq>Ot(ai>zV(r|qm@u(z+4;xJ1w)*~4$X1JWtkS%R4E{KJDaG7A zHA^nwXhM+bw^+LysoaM&995n!goSYJxJkMthC(gB)1ES~m3(5H4Krwp8VB7&%F<~w z3#&8Sp{dh>9uH(&zqynU@ifC{Tjj*G)GR?dvYpS|2CNs^;P>fLNUO71V921nQh9M& zqxv7?>MmeRz~JOTf5l8Jr<~)_KHKUMywtit_#RuaB1CAUadW1wAD^tc>%Td}SEC?O2v5 zoh)2jJ*u=h{Nf9i;y-@&OG?QU!=GiHhbx`li>@zv)TNyO{4yO{y=x1E#&An(W5S4f z!zg9IkwFT=49ec&+V1J6qD-jv6 zZAGBz5RCO?Z9q0d8n1D99NkN2yYdisa?c=?Opq=%uk`Y#&sg}a_*K4kAGGca)pc{9_e7;QaObDDOD5gLbA*(YVA~fe_6M@-ey4%oZh@E6aF0Pd zm$ik>%5LSb{H3b+ziW_26P|o(H;2zdfsA%4>!Oc1O;m&gXYBl-{J^jdo9z6!!Rov$`C7GOAt3KtW~*PFfN+ zdu+q=c_Du@1m&c$%xbTna<~_k$SGIfU+;->qJXz*zjY}b`E#^bJO%HvNL;2r@mrfx zSsN?flouDx8>-nOPLyXWNc~pg^N3%_^HBCAtVv3^A7p-YAERW~-3>qlb$=(Fm@8Jx zIaEnl9yQJFNfkhSL}a3`c9A{)(wR41T-XaAZTC6oJOzOJfZmg6V!E{&ck*kR(YFLC zXr6S3aPv#I#g^y%s#*mFnSxB@SlE9W76mnD$HC15T$}EMt{+On|gmw3xMta^Xqh z##IX?=xZ0;!S}C%P93jdiyr{`(nzTAUEzU&+N=EPeMj^hRLzy9H3bhmXKsCwFQprl z!`pyc+D`{$B|Wg8YA*U8#8V=%irYjHZ)w)6O%L!|F^-Qx4;lT}Z}lixhsTN9jb9@+ zHZWgbiCp`3Y5vX8Mi~STxhIc8bIbYLXew_$0-&^k6(L8{IIpbj7#bzA$kF@Ts~d2J zP1K>8!fx+O+o6;(jF|5*?CaYal*UlhbvDb}r__{xU0>hGyPvC++}5 zof@#6g|qRSOG{209~qb4UJy?2_WqFJwQ*Y-GX#K^SlZ$8lJt||m|#)f9yZC{5ZuJj zU%u*szvZ`hBPt;_L()oaFC3EXx4g>MbOBfE#axA68Fm;+;r8)WIP~p1Cqv1*t9_rH zR?2E7?KuP#5H-!8Z`+=m&pvGywvsG7E~KeKJF}MqT0?w){`Jz^Kf30#DM6I`V3*E2 zHLZ);^$86&((>c<&oV$(j+Htr#Q)ol9)ghVl3nNjoFz7~;`bMoMq?zFQ|~&;UF~tW zt32w1(5<|7Tl*YT5;5v!9mwHa#Vhom z6MqMh_)CkV$-EZGk)_Y_(dC*t_sJi$HET;v1df5rQ@9&P`$nvoQRZK!f!?fP1>S#84J7obUIX$Ud-dtjSKSZte z``b^Y^h1~OOA(E~C1r}&I$H58c^wYyvyGnpEol9n&(bK3R_I0f^gms@s6c6%g7N7- z-M`)R|6ciDVBr5+=>PiAzt+b87f%R(-AGP8B@?l;q3<)f|FujjUSZoIhr3HY{r_2P z@d%&Woe3DE^zQaZs(iwVclO`?S7uU{pbI=z7;v}uN<3`3C-py-B@g^kV0+k_%<=&~ zhf-OxgbufG4{ADLf;|2Ouj4t+ZL~uUY>_{jvp5D5eEuoxG!?_Q&!iLOTmmh3V-!!W z=6fh3AkQ{j&o3P=p`=>=`|=FR<=_Ja=`0T1>wjm+_)IF5vw#dd*~@J&aZmR-x2d9! 
zkq|MyYMU2AWW4$CiuVDgI-bh^Um>^m4b=fwfmBiWQAx@D%F;I-gwVgcWm>ndaNXGp zJx?!Ahczf+diV045VPUl$;%uZkmz{@u?6XGL1DZRjM`NbbZ!R!4T@!M&k;cuUcbI2 z8fV{R{htD?H=3#{G_01e(QI|-iJD7?i?n!^$Q0@-BS`08qZt_UTu<5)k>_R@>}W~Z zZme&1{typ~1)j2|P6jY#y^L^|ieF~6sBsbss1Uf2vJVdqigJNVesqQ-m=nn)y?dgo z$XnxzTp#{@|9>po`|%a+#HH)^P7lSO=62M~UNg}-VsrBv8nBT=GlX&8Q>hI*m1K?j z9gtn%iT9;C(zKUz(2H=xq;h(cAWU@D3a>*VTrb_^(oG6JyjU=8z=i)kRx4k-v~rYi z`o8x z(?$8+BPZ53`F_58@+TH0-iC^Oba+ZXu%$=~*b0#7M4kys32<#EaJA>-g- zUn+hMFi{N|aG$71jcGsSwLUpQ$@o%QB6&t#yGC_=&)QUf0BN%q;(if1UN5h@pPrC; z@rQKR4-~FS{D!?Kb2L(&`+B1Axub@P=F(p+B$~0~-D`f((~Vgny3ixNld0oEQ0h{D zN{R!=SjzLLo7ar8AHahnKdiN9Z7XM}i~OODa)zo{^a|I)#kjLhlhfpfVb7l4)9c9z zuLmoM=}KqbWGp%icg+r2eJfgRkM;#V{v&N?L^pLKPI`GEh6a2dv>vo~t*SK$A$#c4 z-(fA!Pbh?kyV-*Jc3!0i-1A2<_K71WaGbxphB8mxId9pCn$S-C{fIY{h~ZaOWiPCd z>LLz`#&`d)axdH}f7KoI%|UgZmRQSVki+$+zE+ zkNS5U`=K<1!Xm@?-;3Be-1AxxQ?njG)6w2U&Yz$!mYUaE40izyTXE zC}0IPY%c3XR=u*SP1@m`-rRU^sLsK$^dBqb-^~R!)-U(DIQE9KTTpCx8HkmDsj}$J z!Dc*qkc&O`pD_C$(=(;QSWr5zajb1HQ-@xccA0C2I{wKYo{up6$#m=I5^onI^PiPe zns{*q4F_X9+v^wqxOSnhq%jNVdf>B#NB&A_m?VK5sP3H)G=CWMOgK;YYZV%qED(PZ ziHHw?>eS(76vSfV{$x#=v-k}F-hVj=(Ka!ys4B{mXR+qUef4)zfN~?vJY~ZT!8=&U zhQ(UoI=!}a{kWvjJ9jqms&GW%qnaD%1|QbcT^F;_q>%8p zZ|iW~O%XOtG}f1~)CXbyiz8QMG*S5{lKtx_Jdj4}_@1?y{)vT@c&0d{{`ht?{A9`Z zNuHytkWs;h~NiAc8F2%&-w~2bArFnmi$s z)C$R~%^|Cylqv=|`9elt>XM7H=p9MdSWl}n70(QrcYh09JnG@(=71lchq?Pc z2SoKNWk@|N-(|kTqo$pbz_-*VLu!@=L2bjxLIMvLHdrft8*Yv-U6R@Tsgtif&v&0o zs9`2{Gfd~Kk1nYXrqn)_N;|&Me=x`Dve#cjZ~()$f3Q}W_^fA67QAUjT$W6I9piZA z;kf10xHX30ON@efT@{Y8jq4Zp+O|rv*|8W`epqO(ErgUg?dI>F9->t1nC1v@l)rf83t+53 zFYDR}>hHGOx9ENNfJu?c&hzl; z8eH#Vrr`0(nY#7Fz`V=!;Sp3vp!ASu{}Jz$%`UAF2SMB`>*q%I>j$CY?ZQ3SAmBj9 z15WGtYf*hWa6$@H75mbQo_sjYx=0F-a}1nPxu2bLe3|=BG`oP4G6HX7Ofv@BSah<> zv1zQ{wP=r8jN}NCy}H7v5t}$^7bd*NIZ925D)FjRtwCr$V76cEp4Ij!Qs%)WhUkAU zy?EAh54P*v`g*)w>exq?%j5EFS{)>UXO?1%#c?e=;mJitw$Ruh`(^qNvCfcu?=rzv zD!1YfY4ZkG=p9&A0#|SE_I48GMjv5=_dE}G`(5wD^ZEXFECcJ!6~gV4oN_fZ6GogC z%YgO?&zbK!r7N@Eak;-}ievL=Z@F&*)xVLks@Tez_sHW8-lPm>? 
zTEKvshaKaCXSwi7hEkqK{Ob(r$p4*j@`}E0d0IQo3#(=V3$k_wgpFEupIww=qA$r( z3@jQY9`i;xon|Yrt;v}#C~7eq^1pz7LfRQgj&#vj!F<|$H^yu$+E1Lpzvg=e_K=tq zHb7ed3-bl`jVFF_m+$T(@KaT-D@ik))jn)~>+)(0m> z>B@^w*SVYXHAOcv`WZYnif%tSvaa_+LtQ67#A>k0533noOW1D`SetS7mw(rwek3Pz zl|1F6Ra8|$<1ukLyKXi^n1CFAT`>Uci$knAC3 zQ}pG5*X+Gd3^hPx3Z-kWnIzD2yB52#qI+62+wd}X&+1oyPdHepoXU4YeQ z$m~6<@G`V@|EG_{Ztnf9P!Wz+e1oa1-+1nXhx}>|)*vdq}K$Khn#H zO`*tYP-u}57;zzZL0X@}&s3FJw_{|?-s~dP!S0##x@t3_-7Iy_HEO$%;!&%^o7zUcK7pxHlHkt|O0|{5EfOMY9q& z`!k9=MqTYg__^a?clyoCUK|bYmD-j<8@&j5#)Sv>!x#Y1E0k10?W8t9URN}!HDoA#q_Im%^Y z1$pVG|4GuCGH0{y*ZYn&JKqma^wym$t@b!3h__ooKLl%pLU4Y?lf~ANu}K?M-0ex) zN3YVcIZ3zi(c%R^R2hjrk)wm>9(YNfjd`3GP7OJwfr z2=*F+^ed-D0BbSZ+ex3=uVX?<3gD&j z`PKK5@wt_*qixWgoA;+TOWEZM7m@TIEF(`#jBm{EO;fQ;`5K8 zGwFK5F(`^OZm|G#@}iHO3#H5jUOX#g+ho!dB&8Zr z9;cu<)EFv-Y+FBETh9O$>YVm)U6cv47EHjaOll%m0ZhFkljr%>W!nv6B~)zM^I+rB zm|zXNFfzioYfUPw!(ecI=~-oZ&Wgjeu{fgDX{%r9tUoHti)Y5)BP>+$(DD83I2?X) ztgc>di{!Q7u0&kwgZf>Tv|!CHBjjnpG*5fD{a&Zd`)#yUoXo)St*hO+xWZ_v6`imG zP~#zORg8A7pT!z2iIa6x+O&a@)D! ztv$QMq7(M1Rr!phY6^!-D^XgT)_jXU#jCrAnVE0J!fvB@JG4M>nOQC2L-{ zy?~RY+QIC&ahyn0DI{NQ#e-L(H%%!X~!!}$d*+2Xf2yGFDKal0nbzF(Dd>i zL~P#KFBza#e%pI`NHyT4pd{M3eW(-tg~QwAGIR)$y!zTzc2C=0%2j#gQxpR&+SBec zHliY7FmaU4P0IC!p3*4$gPL8R509QCSl! zBv0VJr}1z|GIHRa-R-MgVX7sTON0e!Wh#z8aa(_ahRZTA<@SP^U z1@Mjj$i?vI=~kmL`&gy;8#-3+CVRM7)+`t}qnkS4iA~f%d=LPQp!_$>)6>IB(p;y? 
z*PR9f=~zTus9uo}p4LfD+jE6k&g|QPgmLM`<%|lVKU1|Q^^;T^?<(KMvRsIJ`hk;i zKNR@m2GiVlUPqGB=`5d9KeVcg7Y_?PYHY?9AY^y}R#Ko7;|EjEl4#An8kcBg>Yq{L z{B6ly2Mcj@nLLrPwxbKww0&PvBOjK2nem^;vj^9NNP5lo&fc|}s(QP`(!bsIbDI04 zTaFhH@wd>)cXQ-N>uob?ucI1ykEwNixa=Sf#(kG~jLlkQ0C>%w!_})=5AAN$1dDKkVfwsc2Z~4gPj>r6y3(2u6~A3FmTEaq{S31 zvF~R%+RUV*d?OM!#YgZ-IrWSGpu1b`U!mAge_G2=Dl2*MWjDYwdm@s0VPZD%%xFO; zm==K2X2^2|Dl{ETlN)%4G&$x@>>=%;p+m8czF3=aC19J}`P6Hu%Mr!mq zE24Ju$O~32&x>sc$4L?^qgvIIOg!)Xz5W)Bn4~#n8$gTpJ7>XO<{=~uub%E)5AA5J zazq3X14%X3i1=IQ>$8rEDTL`mXcGrLAS)fDyijs3`f1V$Z}2BDbT-LA@ma>h@W`BL zuX>|OyU8@`k)X!#GYEK?$+~~UQP0D(!sv@wOTVL}`rf5o1 zx6sW^dcaV;lKWu!Q>lI6%6!yT((V!?#jqPgsyZLP8oBUj!<<;NA2j5_eP$lG%qQAhfuQ`>PI)Dxu&AKQn(XLMhE@PJ z8?yDLC|dh5DdgJ1mEy>MO1r!1uJ-_w_Sucav2)+_t|m_}`k-pph^=gc%( zo?!RoDK8rdVY0f@EnKb8os-uZBQoRZu%o>73m9chHt-QnvmaTA<&YaW=Z1-2uOQoN zGPe?jG3og#Z#1|7Np6F?me83`6EM{+&s1lWyxEI}=RWGO z->FwI&H!RFq(K6n2p>An^j*uD%NiNDkV&N4P~Fzh$#ZxKElt3NCMnH_PM)JRh_Jtr z@41F+^}*?&KbV%9H>3O)jQ1|fWCW#KR9FUFAPe=d8J3lp| zR!1dM(Du)1sBplx%o7g8=kxEh32PI6pj8epu948i1iw5p1pkO1cdgsc%K+nHfh?6m z0F*C(xez8`f5O^eStk*q{I%ZG#mwWw(nK#sOJi&YUYaY94nm>v?WbO5{N*mqFEN4; zDrbr{>@=qhl*D}_)Y)Ij9<4O-F3rmc!h&CC)IFF3d)Gfg=f>7%tYRbs2^5*^=u#AL z1#lz=fVXs~x9yx`l=EA=a$|A$WM9VDw#iqDj^SpeOi@in&_PSL!q`pZh8q1n)AFG0 zdQJFiplXByC*p%NzPu*;IS`PGD?{Gf8`O6nEiet7YhMTWf5fa6*WBUH z_j2Y(h zfh;vPS1BN2Z9GpeV`9tmOVitTtzBaW&#(^Ukmu|F(IbwxqB=z6M(&b{Z?E8c_tNAH zB{FtN1b2bnk12-|D8HrycEJ06&WE-U9(}#_udJLSoYOpEoZ0LbM%k$14%YEEy$*Mo zz0m||8MwS@nFX)oh4#}nEw83Se}M5V(F^8>z&U)ZE`wQ|kE0Atb8vcBcJU{0NJBlKhniF1RgT~6Hg+}J4 zj=aYbG-Qz{m(~KvNAzC9+BIK08+-M=86UgNQiz%WnoQ>jKU@RATHv|U5+UNICCLHO zj@IV4v*cwwoozHf0X1OmtOm-6L6$Kwx0?xy_tsWtrpI#r(hDLJjy-sMk}m4cEM|=Z z_r75%f|sk=%+;`Ap94F7dK(%t0B?M=MlDr=vhH^?6-c_cM`CJT*i0oR~?F@^dh?cKCfHEmfYgaCPC z>Xp^!0Z(jI%Q^tn@`E(JzYKx9kC&`&1p)b+(oQ|UACYdgaQ=q^zL?jr*4J{H3|t{T z;{=zcVg?eKrpP#Mqgs!dZ!T3gZ`D*~XI`#%vQ`#l>76w!G%aOWW<;(28d=iD`y6&(Sk!0mk7#bqKUnI<|k zfor(~S84CldeiEA7}WgTwe!Hb$JV?6E))wei&shXm!*0P*Sgdw@E#b}naB89X>#_& 
zKX|m;i?jI|_vZ={d&4emrAJl{ks^GfW$sD|s{r38N_lUs$m2`m{R3^%h9>#iGndN)N&5%Jqd*qtz_6ps4_eXFp#^Xc ztvyY9xN@LbpWjwv|)2{I~fLX7cLJ`1I$a z*#?&%%_AOsz%6w-vH#$|e%v$_gdvK2a5QMFisTU)kP+$owK-KfpK)M3T zQfQcgvS$mE!0G(*BK_CtJmjzH)#`rPG$4CEqt*28Hc|E4Ynl0_sx4N^PR~J^!W0Ny zz9OA2ti9G_`Vvt}E+jG~+ad6-aZ_!^`1Ta-vQKbyGq)#ySljVcm@rFmtB;)XzMKTV zUqXd0=*_-jsXIl)zUsXAM*5Qd+&9c_A)y5@*aG*6B0eJ$puL(xxj35%9}+$Y$w!?K zyJWshE;~{f!d4LWNpqBe06}+YgBHamh_5x`Q`xZ)7UeZ5R zZ*C2sXqC5kTVUqWN&jp*3zz3LELU^4JnqP--mTHHpN(zgF5@T4HB zm6{@ci93b!zZNn+S}UB_!7B#H)!g*p(d#JzJk1XFel?~fuuJ_lX{V*yM^_D@n@1Ac z;(>ppxn!6Az|M-(*1nI0F7yT1EJly|Zc|fso3ENd$iu^-Po0mse^ss~{V-na=72OI z>4ohT_tU^t@^E3{?ut@f{p{n_j{EvYV;75DpCSsRaJ2^^ESVLvuA_tK=I23MAIo7M z@F|R?BK=UYfXI69uDioL&+-;SzO>RVw4D90yWD6mwMzKAfsZDH*tZ0}1BE&R`k{Js zGZhwX5}jz%?M&pQz1bcYt%!b%wM#23#l_R-vkfR|zhKw-Dib85L0SpAN%3265o^I? z|Bbr!2{kf!ET>U#9*M6#{BIG_-#qqWX!v`24jJSF>zy5qk-0F1{4Z&~3Cyl>X-}(z zzugcr+qowcrn}f)cO1*DGa$$YEO!cZJ+DN+YZD$xAD6C{9ThTJX4$!@kX0Jt z>WOs6%`P8xoab&V{ndEqrPqUE|GzT5Jsbc4hq#tw!q`J=U`rFNvSi>~>e{4|)mB?G z+&T}*M%sjTZG}eAx`vP73&L=-YwIjW9vicbI=+(aj|;nF;+QxLho(h!pdv{^#)>|S zqt#Ay0jTCDm3A{F*+I=kicF$LBWf2;Finz1DK|ykXByr?jwzjDPDxEA#NLNSV|~RR zo~0auneh{olg@-8r>n0mWiwVb;aECXSW%WkY%$J1f71V8qZg?@q$Z^v!@l0KjL5*( zdrsZk&KC1lmc^>>^M7gf5tk?FSy1T5f%LI@&EiUfLK{wyw7e+ z3X{{;=BO8h_23Kms6zN61orL#AwAmKwOUUg_QJ6R<1&LH`j7tEH`1Fyb#y0}U(PQT zZH?eG2em?-E-0npvhnCDy|r|?fn5r74W&0H^~%;Q)lEkBIgq)aXP3dQo%Ugt&n-Fc zaYRC$qK2N1eX|cu;T3PW#VKneplTmvO5rWD=NHg{{=&etSp(-#;S4>jWtuZ0ec>Fn z9jN0VD_#WlClQRL;Sj5d6y9UEo$ z=~Rd1tA$pg;&ypbQW`K5oDK=MBdAp&7Yl7?aVhC#;qTLnm6^q%hSAr>-hFjsxHCI zp)t-_+DO>V;}6Z;ZJ*%Z81CWRjr(hm{y?Xh7RO)^`p6@|!tF~4WE&rkAJDGZ6kBx+ z1^FcaVUYWaM6=HWn@?MibJjIMo(S@$FevAtHhJrVW=m}CP?Pn5bb+RF$E}Gc!Qd1uyqfB1W%5xeidNv6Y^^em* zGlLnvG}@az6CAwRhCH4zu5)Eu%#9??PV6* zO$m9u!+kp3ky~5Eal=H}`;Q)4pmGNKD~h;uNKez44G8pA6G;0;$w5DY2afU1Md?!a zOxi&rbTc1Lr{y<1U(y~`lNTs(s=tUo@REKsjn;c|;ADbi z6@glvjw$xF@YGLRXjT+a2WL$6>oalD|&3G(E2jxXGw= z{Ly1(KWei(WD=%^@B7Yb>#H(`qiKry4{c{}3Kb24NT!`c-#tI@IJOc(Oy=Z}zO^9_ 
z2U!T)qlb-8&=kI*xiRkg9h-;GLJA*gOz&Q%8k)cc><1&@4T7xvS1owi4 z=)2_!V?Mj20zGfiX2{Y`57*#ONkJ-CSYojB>G?GIunL=(-ZfLSN#RzqULme8G)ILi z8B(hpl%U24-8d^r_g?23xAnYtu)hZ6oAS_Vyfx5TTcTI$kIChOM_Hc=R%rm%pyP&% zBMn;o#V*dT$n`E&18D&1#kdu#I`=+HVC3LiZ+75MB}w1tI_x5 z&~s@aPEdA1-QOS zdc&G6zR*pBe7iRh6JQN20#_yswlyEWFU@yN>NaZj7aiuAS`>ms%I}rwqzwkwD@Oe2lrFsC(DE;`-!| ztQU{4;y`5 z$b%(2@X=N)Q6-=|D}_B+_aAg>Ic*)yNGtRtwZeqsIBV?KA> z;kYtlP1TBgx|ba_m;?6SW?E)E{#8*5k1LyWx zaDp02OCg_^zuPn-{-26dW|trx?rAd7zS?`_Mq04id6|XP)cn8oBY*Ane1k2Pq9Exf z^_p1>w)yjttrBAfCDN39yJmh*t-CVp2E)CM&y-hWvHxi%h;C+1C9X%|sJkvMQ5-jr zJY8zmdq_W$y~7-!@q~Q^@ICV58-DdTHgSH9Bk{5q9uY@_m*P~yeAjvVQiO#KloT`t zK0fV!a@(E@TYKn`heHW==mPawZm zJ$;~Aw**N$LK~MkYSfl~5cBuHhmkrtX})*_g*It$3i%}XeK zY*yeS<6)m_fN<;hzW-$Ga%KxWD5&4h8mOW`-dARm4*mQg@U+txz*rZHIVpjehQ!(! z_N~-SJ>EBJ_4;mA9|ZSefn#I1;}2?FjzL26MD_50$0{SlEwyW zbjNbwX@dw26ej-)nR7u8myNP=qjV2upOf>!6ZU~4KvEqDJ9yEbmS*(Y4(Zl$!^S_v z14ZCmOe7|$_L;}qW7!2@wkWl=TKwG}yO7rFVo&%An5HOdDv=vfk6Ge^3{E7G>`Dhm zsJxD)$?s!Q(q?_Yf^KrgxFE~w6-)}k*Lr`3$QJ|WKX zT%Jv%xA)Yulu`7=hJsg#=Q^H`44(ae*n8`!D7!CgSV~G-K)Mv9rMpFt?hXOz2I(#V zkx;t3OS(g)q`O82=@?*uq4T}f$9R74^Zxn$_pLRHHR3(PIs5Fh&yMTb=jE_+tii2} zS@t^#tjOJg;8Slrql)Wmy9&^>EhzsjYQNPJVU_U>qHV7ETFcnW8uzoM(D1vL(tqli zC{J;<*nQ&${d{AbTMAgTl**6KRn}@b@~b@tReWKC7WVuvjM*XPBQY+Ea`Eb_w0#-2 zy7?AOXydCVEo)%}8}6k&ZEeR;k#uO~MH2qyc>MEY7J8osP`wGjHPGRqvHV15*fq7& z%FX=s;`!~6?(OKKHFCK0V%M<{!oGbTu#=hX9DqnR*UH`b=)Zlqh<8faZP7ce{cvii z+~=ZSZk)mwV|J5KJ@)1u#V*#h8Xpe^%5;fGFs#2X={S3zQU1fuSwSQXw_&|aldeIz zmAaciDK}>y5vS^tX_IXcGk^r{Xi!!Eyw3BYrAQ%M2UXjV4`+S5kP$hwlALr^XI#)+ z-p~bdcQJux1dQ69HVl6Gv|&=%cO8e`l{mo?Yq=BRhfBMMQgj!9(#h9>xDed@K(<2 zSJ)j^&XJvM$q`3fTyV5=WCoE#@}LxZOeiH%+5-*{W7R-IM8z~e#uVTwq_IY zZ>xxi-Dep9FZrf_EWJgxO=02XN-N*@z;CYFB728zJc1CY>aYNSDE4j8ddciqekaTtSieZU(gA8d8|L5~T@L);QqvVz5&IJ81(Z${RV`F7_ z#*YHufsCKvn>#xPBr+45oHxGAEXpOSGirX>p0dt!KQJENP67Hlydz#eF-wovsIW=C zFRc3df6&s{ES=1mOVmFM9vj-`twatwE+VrGwh51##Bcn_56*@(Tfh}Px54W_ILSW< zb$`()G;E~-Wnawy`T8G^#8|;2PO9v_VKeym#C}SL6dw?F?iK0(-M0o?lyVB7KIYHk 
zo3#G5`+o!J`4~h+nWfJ8c>fn$2|O%Xf{LvqTgTYr{(m8mfoVCZ0Zrec4xJtTYd~sv zz<{hY-~Jax2xYbC4qP4S+Qzs48c;DUFrcZ52P^;8Os-EEAY1bFk9_>E0jbCX11iyH z>-jI~#^eI>bBfCh`g1D%mH|p$3NRqG7UTb-B8gH;Apz9U$0Epo5{Q5CZ@7Q~6}xsn zVEr%o{r?LJ==*pG_Xa^PPtr2^rM> zPX>-OpuvHZTX+8lM*sbvl=diqbs(8ZuKBOdbJ8(@1|y1%WBivIg1^(3c`7)}|BD_G zLkUO$H879lzr@!{1x&?W&wLp7zeXemkAydg^-H#*nCMrR&<;L9c+5YHmYBEj06$Z< z$0+|O39v#(YbN_Ygxv)6XVSLo`1Fg(30UD{_yh04hi3$!W1!(>Dqh_`)JK&7_x9D7 zTcVqPY#XH_I-(Ejqfvs3D&W!WkNeEbMzNWk`wa^SMY;C*CT+vXWt$rJg#d;yb$`e|nsY z9lpcZ`-1PuZ$%T8gHJx+OPuphI-L#Q8~_>0(Pci3M=5$V*8Sq~k12j(j+yh@=a>-Y zu#MG?ANxZI0-8eRLE{eY_O<$SgQmUO!M?5MU)tqGndd68SAHCweU1TxH#8LXm+p{W z_&9u@9w)u+KRftkw5VC24!?K@G@|On64aw3qW2U8Va(nAd^r#W%zG|-knM*wRWv$% z`NB>Smu(5{eaIA_&3Ac`|21)G3_!_9dVt#D36X57J0x_#6BL^G2qX+>LJRd<S(>P&cRt zB-~PuT4b8Y+51C_r0L-mOMVp?@J)skK*1z*~Px8wJlYOSFYEKBb)v-k|)8p z=#Tk{sewO0ETQ&Eytmy8qSZ;ya>B2^ep-UZVhIY~pd0u;%O>2n2tQ^?kGOmtDxh;M zz4osEK8;QjAyY;(*!3SO^mjX!S0O662uFid^Xc-S6jF{?{Izlr&6?s}zpc-@JjC8{M|@C8r#5&zu2sl9!S(krGf2u7j*~I zk!s}j$^9OeQIaZu+;I$@gW-ruzCyt!*<+GC(~Z4LSZ$kM|ms z8GczbQy2`efztuitgI^wQvx0pFWpeLKig`RPQ zzfCI8i!B(rGO(S;6B06m3XCB-p|E7Wqaj0+|7sRoy}v7c9XvC%4Jh>QFK!a2+@!eB zf?aQ$(vIX@92*S;f;q-%i&+06i#aKDvP$Fcwq)3@XpU44u5W42b62f;KoINdoazl* zGSQ&G247-ed-h|4GvS0CQ|x7V`kq&<0;5=*iH+nML0_5a(IvMO;IO4RmL>YL<&6

    =@Ei-6^T-_If9%bJ~))`ooc0qBJWLxQl zew=xE{{E)};(1GVr(gqEBLh3_QH-t8S%O}ezQ?vie-X2b8Dqwv9>jTQR@Mkw;6ts$ zPY}f2()ttIf%5iKckYqt<2_{C%VQ!np3*}=<5W&UDSTtLCJG0DrZr5?>x1#)0BdWd zCvt7pgrsk)Fp~uUwhXM|IJv>-G9Ekz5d2OGhgYzXSb*-q>z4V%9N5hCkFZBXj+R{7 zF2S})eLL-j6iXkLdQn)-|1hs9-Oy(#HAYXxnXy>WN6?`(89ut2@yEPipVSREq^s?V zsevs(1BhDTkugbQvL!XKEtM04u(xgVrWBnEd@B#ZLy`iS?p4WK=|&^MBaJ*!pF3u z+sJ%{3ytsrs;mUD0Rhj`1jm&gDO(@&#g>-?3CvDITr*G0WswjMQ#ygVn(4Q!k9(F4 zR#R{MQ%XNRV@C_V4WbxnZ?HRay}3FcytfgkGTVB%F=B6Up5q%J{fy!-ete7<;*kI( z0O5k4((7a|7HAYCa7nyby=ZPbDtmfeul`w=pf>Z3)w!zUD4)7k;IUT}< z2+Kr)FTEX;1#)gPa& zw}jmn$P0^O(AEQZMcar|X!rY|zC_`i&-(uytJr-qa2PRLZmQQJzQ-*kzUEWViVL#o zf-Gn=*t^`igTxCoXWMa@C1{_j1qp6=J}JyG$xg}VwuePkZs2o_Ukr44Ox0|0wWZgo zEjnt0*wNWvzs9I`SQ0Nb>}c2RwPP}9)mk6PWF?VH2YOe@itDy0wShZF_(mYMQWSxy~am`TNaApG}51mZIW;df*(x-sPg7eLO%#nVy>=!UviT zJDRQf98YOhd!IW2ZQb%YEk--bGv7siZ=&P2D4#f0I@4DVq;*vCWun%`q$tShTI5r2 zVv;$``nI`B*OE-4-WABF%7o)Sdb!;7+zv!5pAxJm(m?sbWUktj-5wmho>HEOhR?uz z4Uj|LsWdt)`4b@5wC!T@tzs9e-=&p71J=?axDIBqMd0qTCw z*3YxGvi)M7uh4FxnT>=c{Rt5R_D!@869bsL$_`>vyWAboGcM%vmx4&61yD<(g}1xz z%&raUz*6H27DIE_dOg{}?Sb=MDs)A*X5Vd&C=}Sgn3nlt^ZSz(`U2m}0GFZKO-|VX zVZct7vTHvlICt8f+%HgR*W>dzUrStVT!})b_iDYpbLwGo3WoWOR~m{W^Y1biMh%Be z_|Kn3izK)$dX3ng!W2c4Uufe1%nJkeCCPJ(Neenf&``3_0CT3G$D5-jm;&r`)#sra zlJr4XIl1o{6}{GzU2m=j?h`{iAO`m3Hn;ooSto$`3kMrOcrx(hI@h`h zUng}M5uLibngC4M@fvf*WFz>uV6`?KF4#^Bp+wJ>j+W@8E?_s{%0YH5^1@sJlpUOd zdNl;hI(dxat_@$EO1*Eo3Ag$uZ*MO5;sNY=a9kw0kVXatd)Jqivz&<45s`+zx*2^! 
z6)88M1*8>VmEHQd4ty2J{mL>up3O-UU5v|a&0~^tj=JHXVCd+3*(z$=!B;L7JLWz+ z9G8{IPdk76y3pO*6$)k+s_YPMMF-MklyuNqv!T^Up4TmF$y_#5hc?L^ZWW*o*Rqd> z88hp+8_5{<9~d)YG3_@NqRm0mLS0uPUXnwEIDfa~FMgbL+v{+n@AWd#!*cviitkJ$ zfi!yMijbRqGLMLML(>`uf@T5G2%V~Yul@3N_4L*(YAz7y4v{pcZ7vJo@zN9B?t*|m zRtQ0s^9JTgE*)2BWB5;l#z(VRn?7>dlj+k>q`uO4&$Cq}!Y0O%%5UIVT*F?xHbc+$WWO&nAe9u_YyH7?vE8Q`l4{qs>k70Kd^!AtV}NsW86er*9DVgXYUI{$ih@o9 zP-EaX+@%_o+yV7R0i?_kag*{v#$q2iZ9yyZkW~N_fw}bzwD4+Ts2!ja6>Su75Vdiv zi}ODbXbvnh1W@B`hufv>POE+Uw0k$m6yBQv5FE|zXwSqfD1S(8vK51byMJxH<~zB^ zNnbR_ddBXsm%?weB9(o6#TDC#2IuueM=B1kT!aJfo5Mzb40jKrPq5a@vHH5N(^XJ_ zZSpW_9m(u3U*-`KCcVz=*J2lkPm$LZkke~T#o+|0c}&&`A9-*+vh(|u4#Y`z2Qr`v zxHm{$Bk!_4Mqij=ML@nuW#4w3fU0t37U8`};k}<7lWYkzB&N!h@_AU9{4H$w&Ujux z0%oGC9AV=JvUw1HbeXD@9UDhib1GwCjFh40<(qzUmJ zS3U2|UI7T6ahqp0;~@_Kx`lR2>Yl#6{nYo4io=ao(`CsO3QR z0z4;Wz|B$Ktmm4XS_zktD#Wo``ywoP&eVKw%NN19)D~c%#fl*yzZtfg$kJ(jUYYFn zdM<>};DuRl8zlgGFWsEA87IOV^BP`+7#? z8|v6pL>2(37j4f)#};Q{u&1KXaV>x*1yy9t?=45Ht*6@EJGZguHu`Mf0)a4nV61Fa z+2NW@e$O9a`*oIWxMk0J{6Nn-Fcf^hQZZXRWL2I6L7og4Q0)5YiT66Lk%b7%6O+Fb z+H74J|NfDzC)SWFId<3yR+XWLdxo3He7&IuP`XOnP+WzxQ^5J-mtvu8)=V7EET;_Du1-ENBZ+ zMbJ;@cZmZ`euOaO3$2gv+$GJkS^Xv`*bCqT4e=gy;pN$aU|+LF@}9@$d7ajFzArC? 
z67Tb`yq##%UJnRBN&_M9GW!DU_~R}InM0bb(h5dI6kB$c?EP$xZgGUu{v!Gig~6r1-0eA?6poQWllW4|;rse;)@M`{|qeTWxOQbhYRKin{xCBa~thP z<#%Q(Z)GX^XBsWW#1|Ba(8I`rl{`eXeE3oaPv0W7jpa&NekS3u%hRUjwjYyWguy=8 zf;1crx6vkMlrAj4;9hewr|{&`2S&$a8O zAGu3MplQF@&RqY%yqZ{MAn`f}fZWX06!J>pUpEPP^Y)0_-nL(-XwlLcE&4mPuw34* zC{CD-!S5JUO`Yy9eV!L`OjKm9D(8^G%%$KtG4B;dF+F@JL68BE(T!%w8ApO>P`X0= zFV>i0hkdYqL5iv&4wgJN0z}a}zh0drsQUtY`b&Nb;Ce-0tonJh?Z}zs{*)Kl>uAjR z_ET2sTb0KUPFX_Brd&?zPhCk>nJ8kr`0_jNY|S8bJ(0v?0H@|i@#V{hbvAPz76@6u-ZKsJ^?TxH0UI-)BCNQ|An*$lR|VTzP$uJO?xPtH&V>!t=agT2Msl*T9>Z7M-`-HVg){zr(c*HS9LHAvV;+uoYr#ssf&W- zNJ#@?&-v5WXf_;QagOY=NDzd39dDzu4dLv(1k*~g;s-S;FqeApJ)8bQGczDa!RHVW zKO)aZE`@yu?$2vq685a_3dNLO&I$>1RG8<B9#EiusSy_>lz~ zdFWh7(fK_N*V){%0q1qgMJiZ?quybq-OAXJQ7UT3DxN@Ft^5VmG{aP-;YdLWGp4Bu zGm{%M3P;_8c@3x}FEB-q+h=*ESL|1%O{x_SlM%iS3FhsaPL=+W=zoqrua3LzUfNm~ zE0tlmzNVb=sm0Uo2>CB5-hs2H>x4EhEk)P5eVb*PI=j{)KJ9ZeZ5OTsr$Ztk?wTX0 z=vA>LT@jugUZIQ9KMvhkk+Z;YaA(Hvb#aSH8!T|{1x;MA9j7nu$g;{qq25Y^5IdF z6Q>+ka>IQmUBGxxu~eNOUfR=s&#!jrkM^eI6hCT_}!%Uav~|-Si%a%Q?U1z9>SRWOc1G z7#a{h+$@T`%tL#srWlWNNO=eT_v?A+oeoIi$hH6Q$X~j*QpH=1|7abPQqUC-4VTdko&8*YybOVpkl4{$2T2irGl8v0Bv?WZXQvNaC$ zF0CpO`$T4Q%mfRAr6}uJt?KjvO=%<6Hix31bHx+U6*DE!{XCNQ%4EpfFAs)N&yf4jXN3T&HJQ=@V9K*`R&ffpRv|B?nP0dfk zrm05^Zcz7XR_86V)mQw9GeHm7`V_U!o_lq?^KhC+v(~5G9bGy3sbIy%Ma^fIny}MT z8rKK*3Jt+^q!Ie42m8}y>an~zj|+jj9^MqawS|N7KC#9o~hlEz2dbt^D56UdI@f9KEZAtIbFAVp)%sO{#} z%UYdY55B@Cu)*Ba&B?lB0lBVldiKk0RUou(GXr0pb1FV-kVMH}iU${pC1u_8{4R={??XS@|dC9$>iIn?QaEx1*i@Izy@I8v`hz(uEfe~~d% zgX*XpTUlxsGUei_|Zm5xNE; z>Hs@h%Gpr6lHAnbq#Nhg>HFgteoqYNyAcm8QQJRh_03|SA|v8mHMo3V4-vI zY_?xmNnJ>yaIpIbT5%o}#yGt9JDyh$G9^AWiYVrH*|u1igST7_!>YT{`zTI`jt^wb zXb2{NusjzT^M!S03YSg#wlyB#fg%l#*sIMCST>$O51RRcz4MLCLdggMh{6pCmz{W} z>yEUT7+=@L$yhVpGAh;}O^#~+t}i#9M-jEBP{dIu8bXL@6Ob{n^B*TUX{R&n2iWbECzKIe>>XZngmu_6f$PTq9l%BOPUDQiI9P@>R< z4Y03`Br0M(=v;a2_e8D(xplWj=a@Lz;yY?kkGc%TBFiBDo^RP1^q`ha< zf$j0xL*TNh;gjui>M8DI9tt1ay95}2tS4Yb?XbVO88OOoWysWP%Uf3t8MIB2qG}@p zQn%17QWC?{8kEp}VuT|n-t*y5My7?y$7w>6F+mKU 
zq*7bLeR7LG;;af{5O*QU#klO8`Q5bgb$yDGLnmY&FPlqGef0%_|Sm*%rEv@0C1*kHlp zAaUk{)v;B|<9gm2|1x1vCNKjc_O7rbKx6&US<&jP=1-um0L(VhU`_Vku_R6GLzI2O z2EH%+UQ%@(^HS?muB&3dokrjdB|XkJH-mF9WAyg%_lHW`tZ330k#@!>yKQmTv?xo4lQ*XRIukTMkBT?*VC zGyX(i`N1UvsMZl7l2DA|jJ(Q^onX;d=L(_iWZrsxD~;iFXD;@c&(%AzSE~Wum2=hB zbF3nG$azl+0bcgRrZR;t81fo}cvMk@r5@GBD1)ZHx}|-_eb>r3z#70}O_p102B9!r zI(&d{AHHk6u)fNYiSx-gHj|la7t~?MtRnt|GYvcBp@gWfHeDCe7v}@zA-b+6N6bIo zAK(e@6Tok`d`M9&I{xC^b9$~4hBtSE3jS>F@(OJVm2abo)o-@)0R;^;b!}LR-nwGJ z;G9rvbE9G}q}6q|d7sfW86E*4BwyLCKZz|!6%WQ`h!48m17qOK5YgAG@QX|r<2BB0 zeGEcQ$>cl{ND#9r9K^Z*(%~s|z19wEp%vLR2js`&^m3YK&gS@tmwzmuQY!exYQ0kQ8+;)bAuP zC}<1FrcjGMW2n03q04_BOFR9jXl4tIqTO&OHQFyDunX2BBD5)6Ww(ho-_`0S~(9+`diUDFHyrp z;cjl|A5-Rw1Je1siyw=Utn);k<~^RD9h&+yOd2KUWqN^`Fih zaAMjpRe|ZzZE$at_v@L*`x@4POXltLBMR7h9_?qF5yrwWyz!h@`zn0WDDW3zDTwR4 z-bd}|lTjF6PZY?Ln3vKkRz%L2U~ay!zX5W-qrgESN%AyUv0fqdiBank@@{B%1cn4` zjpO2gom1!j*EaLLs2VNtK(&M0L0+A*(k}~;mrES=uqJ_q(i#67%5O|?>YTAAFUb=5 zDicg86v=XP$R`%Oj9#7kKFHsn%u7Q4>sX+)#Kwl^yo)}=d-PV7db6OhT;VENUQTkXtJ*L*Hn+)nxqpK+>Oj8F-*Uml=pkqYjku(G^D)>Ifp~JX$RM_bs zx6JBwv&`z_dvNl;YbF%<_;0uK+cn}@bY*mK5&;fQ@vbonR&<`%GFH_od9Z4NoF!KT zSF}NhJ1hQnGIzc=A$Nr3_W4VS#-7wwel;0RVqozXO@0R zLrKDQX65T+jH{cfWpBjEXF`7jKYIlC@aoHEhLuA$7@NKAZ=1 zO?kIIoUTafqfTqbs9l{$a9vx2qVfV9#C*Ag@?AP7C|W+}&gnXp>mKCJbKtMTlOpa9 zsOmkQMOa#nBfd$@60P+4;;`;H`mbd+LP114Q?6&S>~49YOs*{ouLv*391yVb3RS{CSH1n_0y4h=cq-KU5+59jGNY=^ zN6gOP6qBaDF#$sg7o)5&9Cm6wFEWU*iFL8k9*9UpQ!IF5RNx0YK^S6>?5@4HrRw42 zIf(Sv`o?sje#m;XNTsc|;wCH7@hXl#9(j%G#-!%k<8BUL)&!3pYnB0im@?UC+lq&! 
z&W-Hgw{as%y2XANEI+CWxe>6`M?|z4qKWNJ|$_bXD*$r@I)2nEyGgkvR+Lx zFdq701pP$jBS>&9A_g3G@2_RMgTnjV-;FOgBw};l_;SN%Np%P>FvS}=K|HBxP!@px zc-hEm^ma{a+Pb-=@rUNNX68Pt4fyG=imK31cFeB1Bo9=Bw~J1=Hm0h%ryrJS4{gYh z3B>z6&f#!0&a{;Nqso9ze`A3osGAKXrt|Gxb(2?owA-fDie4HHnLIV+syvBcwkJ1d z>E#B;?qW|0KF4$Z`IjD|!XHM=dB5FVe(6_7F(XORR$5oFaV{BJJ^>%BsvWf&{`ZEt z1Mf)H35TwN)?sxOMStr+l6YR*U=SPA1JiD!(M^{+;h-qL@RT4!*FyLq4F3`A z7{mS67sI8d+H?%I+bf;{Si#Zg3UKsP1>NzFdV>D)(4XR=+zUHFTt^j?H!T6au@x_M zF>?qyzD)|w2+dmGbwhz5{D`9W(crfe+fp{VlMU~!2G)7jE7qLJrthrmu;~T!ZQMtJ zaGJl%{(BFJG9j+EJVNexW9@XbXMNmW)=pD;-dI7C-2?28$K$+MKgKGWgXeH0xmz@3 zwWM)fzFpSStV*}hm)L6~lj>s}70&-VH~qBn2k~ zObuTARjLv`y&ABSiZ98+Cq16kyMO8Z z9r$+9&g4@A^Nanuem5JBqaFw#y+}7CYxtki(}CA8+#YU!;JzzqWb`j3!j4AVX=BxG z)9qbh7v$$c%PMR3irM0*Ob|LeoRe(Q64A#CL#01`G8 z3ID8@aD4bcw`C_$NeaaZRg!oE?&qK9gO6&jC969Kk-4?}%39wpjC>dU?!U4lcyPyU zn(gSR&6|=^(B7!|Bh*$(1hFA>JNSg3*83YD{J}92sUggBub&O?U%T#%HsH*2CaTgS znH`a9(R=!X)eoTsxKn=HCS*BvbCDBdB2f+eOYQvq6|o&QuIF?Y8&oe49hq?dIlgdX z{7Vhzb^IjtVrlmdLq1NOvb^2F@R~Q2sUYhuCoEEKJc+3J^I8n7dm~ z(ngpyQJ{_|Q{d8hF$>4`u?>JDL zJ8qDIxy?3)7iwJ~9kuq0zRj*HwE7tBCCqG(&9T?|5+VWYfCQJdMlR4-(i1dZMgEt- z0^Dh{+9`SoyhV3OMh4zAY*7_;ljz9Z`=PzxaYNV$K&YW!W>O5lmpx} zIGWak4&?sE%2f8_Uo4N@ARt>V<#fl;k3<+ByLgNjJOh@~87TCve+bh2C5_r<*l#pTwea`h7IM z*qMkfdhsz(*mX^d<*z04r$Y&AFgf6+&qaOb z1nS(W${y9#`d&XH=DJWt!-~!-)2H5M2D5A(6ce6P^% z5&y6A=8)=x3&fT9X6doKT&a3ZGj{K~YOkh2?fr>CH7z~J__>Qtt2>K&iB^EPgoNej zH!O?ZsM|q(pXxzYH#)_PY{*P`L&wFc)3Q5uPEUMH&yjY1Y@43zP;s==7UKA?N?6J@+n71*U*hM zPB|O0bb(Wn{&ax`U6_Z}=mm!E=C`F>1;s>WSz6Dv{Cdw@dvoUTV3LZ>BOT-trS{tC zo{`C!#o|JWOJ#P<;>y$O~!>eD$Mstw(o$G(^kMnlE-z!To3kq_8jw_n=5 zo=^LBju09(TC8fGE)1=+RwTM0=kpY2y=~a=7gG}O+{v3Jc!2$arU>wlWHwm!Cg*D+ z!ithP%qKU*de{O?dKJSlqk&5Pt2jD0h16j}R{gPVPP><~btzm7pd$&SKPu(^{thCS z;&_+dKiVTjNflfj%o|GAS*hP5W@9cW50ppqu!JA1hSS&a3Ex;eAj8ox3hgq7t=k%` zC`L9EUR+)Zv<}=w7SWC5zG;iUy1FWIEoXSmEfpdwew2JLBv>vxU^X)5jA1RX z9mO5e`PP2jDjxtRjJF6iKV}}AYxQtauFw;Eb?)>8xWD^Ne>}701Ty~bEqI_vNyXHI zyG%O#J}_&YiQfDoe3yI)=iPkF0Z|eal|j3A@0BGJe=ioCtCrwPQN?B-fnats 
zIp{Iw+m%pwGo+uJB_L1Uf3duc`P>5(=sj+FOR54Q+j6{%U%ddr+@%7p3p~6ypFk-?)c-N>{!r}wXH@dzK_Mv*m)6;+ zso#k2g@>t#3*wzvEF*z3;kymX5#X=rO=U!?9tB&!`?=9@-}tw&>eqo8SRrLmdWF*i zPBv2YaY%rO>_mn7!whm&!o%am(|{jtKy}lO5RpqT1Y1vNtL- z&(CdVT1ThB2DdZ>bQ2_HF#<+-bvdl>Q_p<6mpAb7qYj^8h9F=TG1}zK&gSOYHYj&R z+>6P6=o6W(&6xcq@W;%+oPsL__dTamOwax-rHFg!W@0S~29irMl5CnRFnWwNS!3O8Wwxjp&+~ruNK@CU z@^T;_PpufT3)&E z?4Nta;HhXkw)@!oWm|fn@Hw?^r_^E4nx5->-JFtD?*V|jO>#mx|NEDGr2Qm`v#TXC@e1mp1?>5m z*%j#q)gG4Im5&6O(h_c}KRv;^6a@KwL%F`VB;KB=VO$$b2HGimUcWZl{kp$7M$;Bm zDTzPHZ4Y{`87$f@o*JLk1HZqs~Duf2zlD8lU2=4Ok&Eqf`QDWjNVelI#Mx#rgC#(+a=Jp-+tc2A_$9r$lxr zmGen$?yr#k%ey-rQfGmb8yBhSPOYf~Go<7tqCv$zTc z_t0pxdySJw6=g8M^TT!tG(` zPKqAWtLUJnVq&bU0#tV{WQA4bTFB>|hcZdHvQ-isfhhL7M-Z?o-l@4*Wt6?%gLkos zV-LGG#WbNMHxMHnPg3*Zmq+T3^}Xa`5%D~g+qRYYO~Z;~GfP?Yd)X508uo*Y)h`?= zC_OVnpu6Qc-iHILr_R2V^(o70ty(zSXDTF179v3!W>ver`2#N5HYZn^7NcGZb=)u! zo1t#_Noz8Y@_BXs;80b9Vj8bJy6;&gxKcM!vq-KO*E6|%O=}Vz17m`zrrPw2v56&} zfUIl_@f71&#IEj^pQEtf4fW9NlKGHK1ased1nd}cUBLY|q~^v4Jbrk6H)=8p32n9R z?IHb$3AE93=>r^bxSVu)uCQfu%fzT2q$ z0@5HdeU%^W-2y4)hG~{4Jv-w-FQ|iXTVDI(`fl>Vm#G_zm2H61yEGOHkIZ}(+(8gz znqF&=iU&zyN?=eWPGu0D^NRsqF2^SM(t=kSXX=cq&3PY}<#$7W#W82NK%}#SuFJEq z_J%o=<^4)1wcOK#)h^+Qn1B35N_y;Xn;B^p25kiPiOws}YS+{g1}kiZ-c}-2J*abA zq-x0|`(&ssPHQRIz2UtTqH&i^USEm?=bl(W1YvFvNnf>KwR&ocF|qxlc+hhXU6apu zNBS)>qzDdqXZ__agGD&KiaqeGJB(ZLkIxArkBx43`LZ^I-p$pVW(g0~Q1AN-4RD<3 z(=x-c>Nks1QL%|VJVJe1V3TZq&p5?KHl86Kn>)8#w!S`A!{h6)c##}378y-FCnw!W z?#$LPVX<+@cr#LF6ITCkZwM@TCom=Alt%7M!30u!2WKdj#)JBo|E)q9!>r4_%dQZ< z8FCP>kRezupHB0*=iJ*neIVJxL-yfib@i#TPY54}q26U00f7#?Br2mY7}xUH=;O13 z0nLiOvZW-B$1T}AGD}w>vcQGtu(eIrfe}Hxm4ThIuw~2m+M+D6v5`b|uXmr$kGIi3 zZ4bw8?VKlZ#pBKxvsSHXhc8PWnR!9_;yI+DCc{O-+k1=A_0lWCfOm?U5Hki!|NQy0 z`8p((kB!-xq%NaBEq}SS(UiR3^-lB}c;TXYj#FtJLnTZB$&B z2>@Cpfm=G?CdMCrO=D1JqF{x!@Nb0y)--Z_Ydg(x3#h&!_?+bxa~!wk4t5V{GN-Cb zohOiys)3vEOw!{4pBP!Fds-3ag($5{TG@(80^M}9ReB1R3gWbYE7Rx6n~GKYh{q-D zYZ}uGkVzBp*QPDQ`EvCIjR!?O?p}p$tE|+e^RnrNP*INd_YR?v4*xQbMKFDJv 
z#I7qJ&5Jqb$;Ei?6EDWtW)1B^@=Tdb1=iQ0ArKG6mv8Wy#U+s2N+nO!HdefR&-xrw zT3(>apC1Y=%%sdCd;A(A9P}YJ?CwfB1lFuxW!YZ82ILrL7D$*-erBM2-~kw7*3~&| z47kCCld+CLouNr2xw}t3wSIK!Td7*Rp7v5)Tb3l2TGcV0)Z5kEx2{Aa@nJ}wt3}Qr10eWK6DP&}?EZ5w28Z8)-*+a=h&KWN(-2L1NdLR6nMgSu1 zw=>1psI3nSdC4Ly)2^;wvNXT-`zMFdIfMFY_wcnV>PnCNSPs7Dw&$#q<@hRV?G{ip z-Ym9^{VJ%^XRI6<1m{pYFe_ZL$f5}MCFN1B@KkpM+s)*W%p>OmmA5GIa|$0&2?nXD zcXSu^Rf5X~li45F^a_<8Ha+%E;FBR_={mC?cu)U20JsaJwrWK9M1H2()P(rP*Km6w zm*ZesQK7+pF}rjoC&;algQw?r8s;G>9FX7YB{u(=Ub_QVNjuu;h`^v%M5*8Govft> zQzPbZZ6y$@eHLk@R<47uC=aQD1;6gKkx*j|q>=6iYR(GDb$r#Lb-teof?Sg13whP{ zIVgQRb_tZ(?HAruQK}ZM?Y^<4`Ep^LE`iwOjo{h0L1O?>=%{My{xiw%UQ~M^>}J97D~rf)QGU!a9gC zBD0SJDaX!4nfSTNJQI`XP+DCdJMr3hrsin60F{nNR9Fg~B1?<;jqRF_@mPP_bop&4 zi(oiIKea*e`}%jh`Nz|110V0x=VUg&2XaiT@QP3EY1(|RX_*bHddzr7&Ubf0Jbmg) z@L!23SH6`nJ9xi#RQ{sx)T*h`e!VMa`H`b7JN{u-65U4EZvaCl4Dp-95KNAZ<{t^1 zF$Dm~>(M?O3Cqn7?`j>!jQPE8NPPqk5oBU%>vU&oeWYsh)s>=1<5&ogPwmn>rS2jj zna+6UR>e;N@&A?p)dvTCBdW9SbdVM_f_(nAX)$M|5%FonB}K@bj`~*evZ5+6l~eWF zc+~BQ6$swr4!Tl_X)K*`>(}f^?4v*~MqYyI*#^znjcuib!Q{=GpwL=NnCn7LM8lJl zJ@DMpmNp70k0n2s{kT6G!60Kj7dS6{Stlt(u3VRkDq|omgI-w;##O+Ue8{EFWcX}f zTCy&)+-%QERpA{q((%@uJRXJVt0CQnJ*9Huc+P?u`>zEe(`7T)Nozx?sq=MqW*mxo z3|2E`GA*a1qXlPD4iVT)892Ja9HxDuQqh??LYJ7h9P3@5tShu~-dT|y{LG;Pc~u~d z?hhnS8t$O~%0NM>l^XVHIL(ewh=t5ZuW15NTQ58{FJ5fE08(tN1L*0zOL*JhT-J19 z=LE2i8>P=dHzHM8_fyqZ9^w?_N)4_URc($zu1LsT1fn;jLK(V*la1Np=i{HYAuP<7 zw(--^NDpqb@*A36-}1bBUp@)m9Sadoe-a7Q#Vb*To!42bwC_fE`gpnI%VBy);nS-q z;g!v%L^cTN%_94$1B*Hm+>Wr&UcDwkI~0n*XeCKxrfsYRLW zU&ad%y5lj-p_Iu&!ufhmEhGJEhd4tZ7WXWcur?0p zWvJ?&{&pi27JZOBo8ut5*iihIY|#Q>LgP#u>XX?WuO1`aMI0R!Yb)NI2(e8Vz>x=` zMSK{p?2xvRH;V_(oia%wE|?O{Tb)Q%2%uPnsKF=o`PEr79V4I={A~VV)$>FH_l)@l z%WUneWW)13R=gU`*UXvxXfJ;! 
zVarf}jQ#7g8k1hXS`QdEzxNr6ew%N)9eTFZCW1n~jW~+2Y~7yF#aPt}U6FaEFQ>`H zeX6%wx<-6l#aM1YrZi}lmgut1{LB0Bv)I{i<}b6<%oX9?pBuHN56)UWc9BIK%zT)- znizZdI688Luf#LCtj%^;iYVrP51};^NL@lZq_n7hrAUwh7^u-6`F)jVDgo-*$<3!b zjA>(;+2rDSsVq;qq;49%3K91F<0=1BAN9ud$ErKai)i{?<+z6k5KR;eN0MatBp*s( zs*SdJ8CD_fC1hiv{dU2BL~6T0q~>u+_UYnRq!Er2lcTc-IE!D;VUrDuSr1)98cBaS zCf@_*JcT=fIw~EX8-@S*?Y=lXQ^~=}$k!&Pv6aVkWK|wZE-#)>BhG3*xYU~v_Hfp9 z6KDQ)lHNf%djfZ+1Y1fC&Hq()V8wf(Y`Zm%)b5R{)j|f(RD`j4S)Z<<_B#a)6#eo1 zLsFoN5_)idzUa5d@%@b`JT72m{XhIqLF$*u1%|pvcSmgK6iQX`Hw@+P_mps`qDr!& zv%L8KkG}zoLR1NUu_Lq*9t$r?#E*?kz%>oHx=;pSQy%T_Bw_xtaesd+;H`Cv56wU+ zr=gDUKoTL(1;R6$A~NIQZ6}x1iYn;7?|-r@kmUn#2{T%azHR?r!haT|GZYxrFL9j_ z1OTW7x{>uEkhk@L)?bqSl_~`mC?yJ>=&wclnd|uVGc5fMV%;oJHuC>to_A0- z5wYV$rIyl@f&1gEZxW)==+4a*GV|q<*;V!gn~Jn2o~Z#ay3mNIw{LR*`0LooZp-F! z_khmgrx0|NgXQib-|P8RDAbI}6XI1Ab?wq;)dT+NHqMY3ApUMVyTjC^GZKWV_Yv!xBhhR{ewja_tbkDogeZl0tI z#4(y<mmD9b2N+CnwC!q9;qy;1tq`Q?4DN&>w zhVJfCx{>bglH{8(|Ei80Ax?b z$NvC>u$~hE#>lZ=>yrZH(sbGOYIS<(k1A@Dl|lhLWdd5ns_~7R|jA!mv-C ztG=U2*-4%STiV;RKUy;)(e%P#|BY{{*af=!UpFNDZ13q*sz;)KYHp8Su^c_P*MmMv z!~QFT{>NGY4H)(kd?JPP{V@*`D?O6cTz8%@fq53GDW{{NdvtSGuQpS-Z$`G543PI! 
zy54Ri1-zve@p^v#9bh`Wo_&Nx?arK3)kjgujsoWy9fgXX@feL|j~qXJ>fv|?IF)e` zjCt=)E6BNyTI;{2PHZd(HSDaG(I^yWt3B*t@k>dI>%QjPB`KbiRaA^ixzP0y2$AH$ zmUM%Y78a-7w@f&i5WCi#)-)c;@SipPQStih{C$(Z01Jy58q{V|-HFliH%X+a9?x`& z2<}C|lQ)_DR<87k%lQl?(Wmt)t_6UuE&`~0R+%R9KEN7}Wk=f=U&{nwP$9VtfG76m zh)!o}g?)G)%po5TU)>1>zhTML>Ub`#sp+2XaiOW=0; zRK8esWIi)`kHK%x;G=~8!F~VHEfeEoQ1bg{7LHGT%dlrv{e8ew7~PGX1$7w<(Zjgj z<>tlN_F2jx_#N*_zuR3h&x13L%iV;WD4ML^SrRJe%_*Lt{Um_q`<{aE&(8PPY=8A= zUj??l%)de5rDgFm;`r7j+K)AuYrCg>{W9B?;bTSLC{2oPdwcuVT{;O;i=Uv%gB&rH zUm43(qQzMTje_T3#~Gu_unbs<)nHVA5YN$Otnv)^Hl(k#Nd3BljN(h zOB~=Cn8oH~oNP*z8DIp4qanibNpYl3cBPlpS9~Yi#au0<{oxy!YD|%+)~Zsf;LFK6 z-8RI{@59fZHytOI+TmtD)3lZs6`86M#^O2GxmWK{kijsJF#~T*?e&59N-9P7z0s|K=VI#UiYoGf&3oS_DEq?OggbQq#ix47zTw$wQ4w@65Hfse4A&XD<&I~FnV|M-B>Z(;L=8keVPNz)EtYJHVjq_96y<|?9LYXg77)4k?2&}MIp_-Dm&Bp;>w~&lf&vwiz99x`X81OEt;pi z%i+Gd5Smc+n7bw0cWtDt79+Jw_w`Wu2+tX!UoT~i=;YdK@Z~<4>5u9R* zsor*|ocn_&QM0+~Q6?Q z4VBGs81~E2sNS_YobiVbA6T&z%9%9Y2Nrz)h8c^oV($VNx7G>7g~jaRsSJ^`H5O)e zq9llJZ=0r579Da57B?c;*#e~iD#B>7N717ngi=y8#eRL-II8F)M;YAZjKTLV)-itN z_>B#-VcKl#>+J%Is-l_3s(R_~&u2G=(@7G-2KVqdE77rB=r~M-OrNsp57^(SvzSUr zmhR<13jF-w5_sLprW3e*FAXZ)9xk?aN?OuRw^A7gND$|9zV5_~f4@n*#w|w{gb@Ec zI1rRxzOp~1lu@)?-kGb}#|I()vz{&ryF=(WxUb|F-oM()D)979c(oV@EZAErGsZa(uO%7R;A6#FQn=SP(6eWs9U`vJl3*O9hZA7PGsx{hOFw z=Ucf&jErHcdvkqUH{LYGB)~vUQz^iH-jiPHbfK=@ z|5k!lJD7lXmasTeBs$=_aZarUW5vMF`*nP__`_%Ys-pJ%xNc+^@3pZX;T@+usm`1p zxMp>D9NKa8*++y}%J07z7AQIP3TIj{DS<+xzm>4Lp34(x*Lk;gshH_$^ST)a>@e9q z+`D*9Yyx7Zd4WdG_>1d(bIwYg>z%jLIjIF(yk%LDE+Ki|^ zn8;0Cm)6z-2^r`t&QMecUx1)F3V3o)kfA)D-p}%r5N9~+v?-GPX7jjDp@$vBcs#xI zDk@+@^h_w|e3`}@sLp6Me^`S`-mWLjTi%j&e{hzaz*4XS(wMK+>fm)Gs1_bqhCtF7QEjio|8ZJO%oV*i-}qdN_^ovs?lw)T~3Df?MnDc6m2AG)^q)rrtPy|($rhlMx2#Ay*~GIfvXnC_xtJe)>f_Lf|*1^0m0Qc7d#yI|<% zYVVk%(%>Q1`*_{V4@xmT?cNA|g$p_r`oWcZ{_{(3wdBXL`ga^5Eo^2ZCym^4;wh>W zh?Fw7(%`l$Af&W|<+RSKq;C~2kUc#LZ0b(wG56s3=RIol%3xmI zoSVs5hQjF7oAS|Rzr}XpIU0tRXH&W|H)1+uC}@$V7ZX=F3f+RJSdpO{5(mn=jiit? 
zUk*(Ga32mn`JlYr!z&$nTnzWIqZJBox6;=hVna%R#SFMLfydg%5}iOkCn}4@#`Atcl(2W)x(w5N1HRM>bqxIPUqlDE3p zIGeII@buW1Gvy=hhYDwP@epBYM@u0^fN}{Abk#(`L)1d9M6$AD3R8+|j+VBp$DMLl zjGWboG~~^c#pWOohnzYA#RL7dHNj@lZJLbKjI+m=;*IJw>BSZ1jdspN?ozZB=|7-D zfg6!nB77WTwy^D>tFGpj&JUwowI-eX&@KKwjSO}gowsfw-XP>APlda#Fgxmg@hU_E2wz+ktR3Igj*eIqYs2 z_e!1^?avDkEB_?8Cab7Dzv-Kp^>rZukWb^9$yw%a$6>o7>g*oWX}NdVa>U|gZ0v_Z zu4ghr-0BR<{76vxTH(YKxeQj`ai>qt07he3aH-E7A}Zb0$%f4|{hNaYH^PeNsUXL! zh~7Ac*%@AiBP{WF-6!5(jC39Zru%07Hu9v>af7k%(vwDy<9J*KuIjv2@5W^(Lw4vW z9AvHzwZ06)((7Z)IpdKnKBtwbvO!ME3)hGLBDS`_AY{A1*~LgA&?qC-Q$p-T1V z#1yyQ5@1I>nD_ zMF8Cpd+j%ztf>LOoLq5~zdkKiA=vf`Kca#%8iOAGE|}--txO4k_vmlcKLIpznCG7b zpPuCfwrT(F!+06Q_vurnCEl@=9ZA%E-gxIhjL<&Xp^e4s&s`Kx7l|(+ZVg5w8L5<9 zT*@pUaLO5*xPg9&Gji%E6lo_M#{J0hHJJlz>T=6j-#Prx<9 z^(VGf(KJ(=uF1;PrutOdWRsj~b>udXe0iEJd0tv|>LgFWZ9SF_Gdx#YS)7x=TgPDPmQ|@>4k~<+|}HYzFHCNxR zZ?|_2jCvzAG6{H)vdkA2-z5c#Gs}ynFL&ucCtjcJj8<+z4DTq!Q>Z(`mRUhkT3TAC z_rqwNlV(ZJk#_|jw*|-h_Z?_LIj(3;XQz>p6q{qrj>R4+3QYc05?_&v*F~c!KjX$n zl2Gr@*}1QSHN=wNw;0Fy2vm>gXjjixo1HYa)ses&!~w`OeY$E9Xe*&)a^$m}IZ-n!B4{W&Lheb@J*1uJI0)`2AN zyWN`Q5tVOOWausdfr=HQzE!X(6rA(LovNCU9)UZI_e2I;RxJ?FxM|VteLJ51QKf?n z2#)jX2tb3jV8xL~Tk+)kTR-}0f3y7h@?I(8!g#S!Ku#R`UbDr)SXY=?5mrOX6j-) zq(?w{cgLlE`$inbxZ~xSJX07RW@lj1=c8KP1W{phqfn~talUCPNU+uFN;(ADzZw+>$v z1_jx{gb9InULQ{VOQV)LGm@H^wQdy?;gZg;yFoyHM-s2J$#MZE-;Xa%l3%>S6eS5{ zjHGb+tX6M3<#>PGU-B3P^DrIHU5c4gQ<7Waa6EIBd?monnJ-40?Lz;lMv^x<_bMy$ z%!(USq8|PH#il6Wf||%Kzfi3-8VgXjPp?|~Vw!c{OFN0M1OxMVF<-p!cc zq)bh+7z{ztv3qh7sJyR!WwOBo@=soLqwg$Ns@Ql_IHLY2-Jxi`+QeK;}wBv3uHQj`TCMA6;u%CK0~oy+j_2I9g2mVWmBGc)jFleNP&AaIZk zIEtwg=xM7_Ph4Kf$>` zm!R#Jp|d~PzzM*yDXY_q$8RNmBn?Lb@teEi}&Vlar6 z*4aVjIQnb;yuA~FpwgNFbMT>WjLIdxD9JZ5$;=I$s_{-?EX(Ex`dQv&iuE-P`=(nl zwH8+|nQSp(z_7#we^SoLF&H_G@FVT#f!K3dm<_~@T>VIwNTp19-+dOpLYbsc3~lmI zvYbk@Wj8dV2dTA+wC^_s42!+vjR#n0SdEE&OmM$8u8Ihw9jOV_zyNjL|BXpfRA2m! 
z+ z(U^fw0y}g>%QAn<+XwgBc^bPZAFJPYOn)Aq!^2&Cd0b531%BRM1&-&9sc6rwa-p$n z$LTNV^IWKv-Pw$9N}d^q#Qg_9@ULqDa7e)C$9(}sBm5kCTFDIvp=@PL)Ayor&$S#< zm+>|LBmxta&p@FCTkg*r%#*t(E$Eb)g`ewM`v2eJ5dM4*#O$tyn%YGopFBM;VRfB1 zy#DDW?f7XNqEKAjSZ*v|Fzie4qHIT{-x`(rcU$b2Rk8dS0kRhiLU(r$enCh~c=rjJ%S+mP9 zo@6**ce}Lz_gVneY5WF+Pv6#|DL((ruk&}w9^+x|7O1T;l8{=-e|dg>JA+;P=lvZl z1`d|Q(vsF}Tx+Xb6sJ64mT2tc7RT@i4^X%BnKYQ+wuZkI(K(>?(J65qtb&1qLzmgx zhPXlIh* zDeBHW(qdE(xQgHWw-zB3Mzeoy46#eN;-3k{iMRo3PzQcN@?VD_U#kfmd&9z*3D#~0 zqDR{L@}Lv#bq;E3Zf?)&Mh^$+?``zYv5+&=kokq!ZS?g$0&sDAl)Jk-N@_-tZ*jqy zt$aUi&pQn`~>C@C5Xi3pr=h+_uwf2++EaCfgk^$Q3%j>#gu`g%!rMRR%x^Zfax z36qZxcg7E)^IV^Qvxoobqp&2P+iZ@^D^RMQ5anC3d#c#s*Rta*{P%_fx&@fNENuIl z^_6-4W@E$z0waLlfcbzs(La~`n*;FYH_akINcfH2kOnmQk3%^evsEwHB@An}Abj5L zb4fNS?Z0vE|Iuofk5E4I#qw4@Hz4TAIk3^;yYvM}3rMHvkvOLH`hQKiP*lDsF#CLN zCBKyD)`6tmf$!)_ZqJvU^JmBClO}mk&-}bzm2R$Y^ej&P_jwPD!UzQ5)>e&?v2bX#d7{iz>j8R*_bcL|WB?t3TidJ|TG$=nfnNL*_4rqL!r*|lfeh+P(*Ia^_*(5AVGsZRn}SaS zqQ?1qHJU|#wtI^LJX1!_FPm^I=JW0;786N8MCNn>vAyq~HYLPH0~#sFoRuE)kF|x+ z0UfT{F*sY|uJ6gxgn~w<3=kCGmJ)gm)xnD^;QNALLZaFZ;)(zVm&RJ8LORg9VjuIK zh^W%>jB~bG=T$PVorh8sM@(_6q~E;wehjm#*j$m5Cz1<&SY~CkTPGA1#IbQ^t0P= z1DIZbW$EDZhSl`g@dF(rV?=X2N8O%H8ohy-`#o z6ZuNXtQIrb8tUIaBcC?1=_^9x0&)J?GYEc8OwYxs+{PHQRl6d!;=2}EoniQY)@o;O zV5n-;$tqI(JyiK3h~GJ1YATm$V*-+=3ESg&WVH1uXR|wj0Rb6kBt4vT*}aGycK1+C zm(~>5T1&LlGMQn8-M^5pz~Y($Tr;)ByY|Vi(XBxm#v-b}ecDaKQ+ApfbIU(>zo1ny z`G~^6qDzn&$q=9mI(+h=h~(KHG^IB*8{px#ET7f-^N48p2Ye2}+g07a!j zpYSVOT=6I`snWIAkhzEZxq^i`z_bN6(DHeWie4PvzJkM35NC_x>=$n2 zA}GrmCSNrV>TI7$s$^wgNbO-n*s3HDgFbh{=H2rb#)Pr}U3Gh5#|%iwn>2lYgJL#w zTyoeH6Z6R9)%y{qZu?zunFB&QYr8)$9$TWC9`@djijoo*kPZvno+=RU3VbzjlaGCJ zRa{AvF^zxUa7yrUC@3B?7|4v(aP)9Ia~Az%i^TjzsnoJ8J&Dc2{Q4?Z zgC(7xwUh;&VsZb+2;HE6^4vqIy+26pW+s6i(b#J3|;Ky=KDQ+FuI zA8_G}qY_O_>@ZYl6Lr((9w}d{zg5+3z*5JpzylD*EyKYgK;WU+$$0V4lYsOi6jMvG4pnx%^gwcrUwzX=Uyp#E? 
z@e%OgbUqspX-qI=F;~6tSxok(G_20_>cBX9i2jYOW9*?xFG&nyb1mq}jb5v`DV(+Cgn1GSyu@R(Be2%z%q?iN@UXqf=maiB{qL zgV_0xhh*3Q93W#F-hAU)JShK)TG72Px@sZc?h)X2RwK`PwR`)<3@PvzKy*6 zHH*imX9F6gansvtG(l6`)e5`AaSQl<+1H3RsG{Dk5$p1HJEPGs<#&q@f^l1QR6dT@3_&t zllMM6z~YEGH>CV#dlyrGP(4ivDsNBcv)d!)YB-;te}A;+I}dvEMD?bc;;F;h+ACah zATp+IVY?#+5S_#7NDRkg?ncQqpg=JM8dpz$zrNkrIt)* zfrb#XZ7T900+Q;NlW9Brma3%CY!zp-A;}MDHvuQBXR4ScSJty^+lc4f z;iyA~4vm)+d%DfX2 z5|~{>crc`8X58k;mUfC2O74AlXdV$_=*MWk1QrJ}-Srg<+A* z{<_0DEjCpnDXA8v^`XEfCxke+760SuAYfFWt3;I@aQ0LO1^%rC;AGV4dm!89c~!Np z-JXgEo202>YRu`jltG`HH`V82t)l2%Hht|QJRrZZ!WF2eA%QZPy zrpIfBi>kFoP;kbRv}EA?Xsj^motn_X{&>w|%>LD>A~qe8KVgFYNh*=~2p#PkVXCb; z)>Jlvo2__a4O5|a&ESg%&9IM4Gw+_j{2Iq4k4WMj3pA|139lp2)1!gBGPGKO)FI

    Aa_8POP*OrHGYuKDZx&!y2WTvpC^XR%)y#E0yZxaPJiy3fq!Do25nL%sE| zD%c(K#Bluxn`Hs4EpSdsU@B~Nhz<=${3VW}^H2<3L1FtZW2m*~(Fi|!@f7Eb6AoP^Vj%4ukAV23;L)e=uOCQX7|)7y}@Y=^E5q%uVoKCtB)pgnRV?kx*!A z!hCM#-njbRDhE^i&|06v@TCbhf+da$76i zE<)8EVZ1)T;lne8W$*NzqfBZtKWoG{t}e7Dio30+Z{}Wo$+tjtX4I$uc5_7%Sw-v3 zc!>#WEK>3U?dr>r^NG&o@%v}KNFOL`AI%bDHoAilOFbrknUOSCyad36$?`GmIw;7s zTJa$s=F|JwL@SoaBddR=(;Go{+E@{vhC5?cZ@$u$w@TdnO*q!v6)b3;2-fH8!>Y=mQyG3hxd?Z9e;ZHT_8f z9mD{3(H&7F|7*`d$_52$&IDlPLx+L4(KKzW8Hp81<%l1O?2j^h_5pw@y0(Ya`;ELY zgIwW?YCtU$cKcG9ZF|PVy;ObH`1OWy8yH->7Dm@GFez?5v^QrrGOR@~7FUM;?c^@9 zJb|k%=e&aD^h#I%`qYBOghpxIro6&IJ@vNqfS#q!ntPj_MI*;Hs4BMb;e3vxb3{?E z-u~$2UFE8Q(s)SY-U#gz9L`$lp9bHxyc}6c0Pyf9NT9*D8bV&zTSY|oHV;}JT!xq! z+Pkjd)S@n9xf1PMo9r+a6D(@AZS0RzbSe)Mo>sZhHtV!z0z<*vVPN2{uA~Zv+3p&m!S9YX@qk{?k+dTMAe4*Dm7U0g;ElU=a>GG;|*ZG zI}DSlJAsxGMUk!1Y|NdCDqZVn6hUeh!2sQY)*-ceS=q9pnB=1hy%-K|m4f>8zL^~c zBu31_$?`o2-gwS|=5lV5sd$y;NGNOjN+90sBjVery7m<;^6p2KnmZGlW()lx^7}Xv zj|A{^MURT^8p+^1IREyz|LyQ0ir||B$5Y2p!2C^C+?}g1;4MeG3F#?zZO+(aS zPZ4WMn(oQVzGgeeoIR!2T_1e#0P^^Vto22cyMBJScd8gPtX}{43uYpu{P;94y74dg zb2lQC0X^O8knRbyc^Y4Fjfu~0AE$nBTCE;pl6`hE2zd0Xp3|^l;qhEwqBqFa7|$dD z;x1-23-LZc9yZf4vxDZlPJ-(52+vspx7Pyb)h}jit(aIWCWQjKu@!h|FU3+>{p*`% z`Uy+los=}+59tT1N>>@{Q5yzCZ0(*YN?c}+E-Krre0MPUp0rWtbGDUS)9@ohFm$)y z^tP5^{6@{>DC)qwsS^yxtFoUBB$hDLCP%b0P5`)l%FL@Rx7r!x)x9FFpLfRd^r&p} zv=bVZac<};HyXme$5xw-$hH{j*L*SrbUN83 z58)B|h(1;IN|=Flo|=q)E4#kd?Vi~QzuUuepkvY(BPwu5TW6>;2IYkE;%rxA#yRj5 zSWi%68Xcij_`?tzjLlhmMZ*fZOWrfD(O}aeyE5*1wO3t$?EKIdMV|*Ux^i{72QMnM zsz^Iq-3~7Rf1?X=e#Wr7Uz3Sb_XsO zi?6uwTTXZ`_I7u7JM9cM7P+0)#one)Rb71-U$tU`n8LrjtUfoH%}lShJfWmzhKjNF zf^B;1|I(qjdwUPJJMsy9*dH&h*U-6APVb7*O@yIH`5s@&!iR&KX!tx??%5)1BF7W4 zslvcRO;w0YZ1d&=kL#^H{ni+jXl6+>E_unBgRth&8mT}a#;AyAXmm!ai(`62D&@tB z#1)xHloZb!Rv|#+dEe@Cc&P)VI*2pz<-Pc)hU%g$zI$hVw$V#XguPHmC>Et+QYNt4 zAGe=y>$bukPznh1hmzh`_Yi>MJUQ1B!;cja4G<|5<<53TE1{2VzrS7fPM7(XJO4qzUS8?g=g+g7M%vVyY03^?l zY`LL9#%^M_z0zAqdJ85l>Zj}!jedaD+IwlG1C7X5D;n92ytBQ->gR+=$C1QxW^!A1z6GIE;RH 
zo~#pUhmgpAGs(SMS+c_4(tz5XdUqq%lXp2^S1!0+1xT!C;3@>7lFDH*$63ElVI-bz zU)p^Daz$8Q^g6KerhDdd+A8rC7CzIxYmkq?zxU47AJz_=cJ-ei95rZgQ4Ms?rAe8jGYG%FhW73kPU!L#Ev{W!_IPjmy?V z%JKg1Z-0I>!tahyW!N^r|4*g=Zoxml(MEhqJ+9p!t^59eossc)lM&B&?_aI*9|xUS zK$W{KQ;696U!CdqcoP9ceJuXJ2JL^= z7*z#MDjiFum~`&L|2pH3n_vyle*3!*{@oOS5-e&OKV7w|aJAR}Iz##KCUkL1aK80_Un)n#@Qbj?H9?rf7J;M<|Z9?gj~mZuTP_yn)>p?@0s@zQW9t%Hd*tAI5e0cgws^I75$ zyK9(<R|1vSLznTNJ)#YsL8?WBh@dUjfjXW*7 zmiH{cDAn8C`2M9C%)KDZs4KJ~)9qEtbRWyRdKj)FQ)A1LCzn3pC8gK0{exiyT~tc%5*3?I?(cZE(q9ZcUVosb+=Q@GWQqw^sWbBtXI~>^Q4M1R|moeS57za z(K~i%@G%=v&_)J3Q_1mptmPmdUpKrF?F6RcLaGI%&Mf#T;3M}BN2r=GVf~R#(g#JS zU`wTv{jFc`Lvp&9*DkO9^xCvnGkSO#FZCS$;F=m0JfF5_8-6iz0Z|XOyUlU#% z%H!NjS&`RR+-0(GF4s-&1N?BlfDXEu*~tc2$D3gCiL3okN`nwvi~gAiK0?)GC=1%5 zXgG159Hqv#R@bRi&6~-$sHa}xsV~By;cp-Oan&CjbK5?b9^FJ?KHpy;^+$)zkxFFI z2#v=c%lQh-<5I_`sYA^#-!Z13ibKbgsM4s^>_sqU1vpTSkLHNd?BQD!ha8M!?T^T1 z8B&oG>$u5*-eu@$$V~=2G`u`~nDX3lm?!zQc<4g`B8NwmM*-!{o@a;{Pk`Zi$UfwE z2;ZumJ69q3Ej5fvo6h5;&j2S=(}U|N)tELNDr?Z=6gP$5o-EGKc2=nB>I@<5xu3Se zJ20Thm1;;mRbu^X0?!LVZH#{z#(RJM17Mnv*<>RDX<1#{5)Cz=Htycr?nm2f42c3^ zlc)(n5DPcGJx#4>pCi(k1ptaC0fKC!Yq3DB9L6Im@{WplvWTbftL>k+CP z6{79m;0w1q>?RVb-oWfJmpcxA7S5v``DvG_noCeeqfpY@9b76EKq6ld#U`v)edtkP zb;7Eqertb3YO{_cQ?JV)_@GSQ0uBFCs0$&h%6lNC30AMCm^*@9nz%Rey)OoJ9GL8Z z&(^Z3+5$pnFu@`eO`UZf9?K$1-i8sbvwfYZFv(8tCdQ_km3=&4=OPGLyYxqL3vF*w zzicZv&}cm~%HckE*%inxp)*@%6PhGH7G#re0(dL@EI93W0LW_WLc_(*^+$#z5+~3? 
zwjX=*XWoT6lT_-Pq?BrZu0Tw>%_T9Xjp8P2z=7bRW`#U_ub1vR4t&bqS>BtyoNJi} z!`6h|7>24gJIhNl(1QxoNX3^qF=t%yhuU0xw|b>AR$K=QuDObxN}=2WoX+vXWOIgu z8ZVDXP1eO^l$CO(4~X1OUSztSyRH;TY8zimOxW4(&EynVs)(~OO_4JdnS+_?jKGHNgTVn<>{X--nTddb=8e&ut#;K`~*kxnJc8 zbF3^op4@HiMAC}1LUr76;aJa{m*wvGVTpMHXCwahU5H+9lC&fh5Xg!xhV8D(Fm61r z5<)HBd-@dxS4vjuV7{*4RXCMweVi{$eN)J}WttA}enW6y#8qeS7*wjYJsRvorIAcd zxj{k4eUKGCh4*LZ_vapMfGcO*?grn#r28Z?N1}j%sHvZQRec>82Xaml`3F%Z)}mGL z7e;NK4ldSqV6Piux!kf>f7t@9AB8k`FVHJ9x|+5^xo4#knDbZg>3}?L(cv0yZ|9u!jy)-HNwS|q+2jn&8ZFnqe@)=3dd=9yYi zyuCbH^HvAv!q%YbO~9O$UUf*KtBuBY!G=vzj-OLPtaPTQ;8xZ4g~z{50N!BF5LWHwUlk;$F8 z(v?GT=$YOeU+k=x_ofoD)^;;aB1szy-0diUoa+=W@G0dwb`udw%3@e$9_u z)xHslr;_*)U&ZYv_O3IrFI55JN?<~MZhsoBzQ8Gbr0u;s!inVF-DCVA;^Y;*18zZD z)JMEIZgRuK7EkByJ1%L-18ZJbm->eC$!{6`8t|2W&JFXK`j4(xX65oQr%E zyW#IbIl@E00V;;nE?L!DdCiWN3y_4`YfQn8-PGrMGd7A_y@-NGxL^&@oZV+io)gZC z5a8bSD>Ey^nFBQho4HBEQw=7v9tv3yW6>*<2TsJj%6NCtXlF~Km$SuoPd)B@bo{kt zY1a98hSJ1Y0B?i-Qjmt?kQ*%s+NtCk2lgoWv=0%iq_cAxe=^>mu?ITa)zM4@4Q9G7 zY+uJ=_m6-F`q6A6-s)c#dyP5U2{iQ{9L0#Qg|=k(XVbkGvvmwC%k7UeTMZY{Q(G>p z_%cI{or5J}6?=q!KfFSTQMM#ZsKa|(=yEp59&2K9T#|!jY;<$^U0=cC4tqsaqI3_M z>HLEgr{=I_ECd@+$&l~s$~+e8Di0Cq1{U|tS0R(4uxC)up!UCieo;Q**LDc^tuY_| zEoSJiZG0yV)MYqI0U6h6Gzf(a#tch8GG}noB7WfwVpfoJ_Ty%}zB-^sW9OulV#DAdoi7QIJrc%hx+(*e) zJ*7HDaX#KSF>}Bcd%$5irzjHhVno+_jmT8m`s__1{jN12iPyDcA{rm_S!p>+4utj- z#v{V;G8Qr`d4&C01NXj5Q8fPf=UUz%Pxzh@$pK;n9wu2>5@lWs7skNym}dl*jM~MB z*TVM4e#-tBTfvzMAk}qeF5P<3G#%n>f8;yo$}F*u4{cwC!@1&za9sAjOE#BYTfB4W zY6=0!=P@6sCBKV#Wzk@0OLg<>1C-Aq+1-t$>*?sSpYYzK#qV|Rx{J0y+}hkwf!L+U z+0g=CF!6jx3aYsz403eixEAa=ItKSLsYMy-JGVOkVSiU*C~VZRPgQfd$Y5RC=^#4zIY$U4BnOZ$e!nJIUsU71;rIMcj@f(1Fwp! 
zJ$IXT-R-07Tt{}hyqQ?k?Pp9=SUT|g+$1Mh^wIGZSP5#OMKugVBwB`_-suoDlMLHZ zzHJ*0`Uq%XRH&0q_cvr-lVYW}a)hn+Myfzs$cIu=v>?!i>FbFop?~d~`3gzg34E%I zjjquY)_vvMD#xx;qAq{kRt9wF_J-}?FTd5l`LzPrkvFcQFAK_Y;{YKVdU3D zyZDl08zT>&oyI4<8p>Z`68pjtCx3|YR71CNEJcG*>u__p+IzfMsf!aCL7L0n>u5@+ z+uI~qS0ahgEChtA*&E*aY|6q+vv#9?+h_Y#`1?+ zrRCEFgHz$9Tf}U)rNy0dF+uLVVcizP-j-P$#Gd;rgDhrqxjEd3a1EE7o7eafT51Mv zVTmo8dAH|1%tg2DAN9ZLNxw?rTY8F!0awVUQiG%v_UxwC_xhZ`Gp$gqWyY~|0!{s1 z;PYM5H7Q2=WgP1=5#B|4%kd{)5T{YetD9|DEyW4B5>W5XmPd%clusfo_~GlpuJ+fe zTL{Z6BbEy?NK}d*dQ4%Sra}9RlV-c)zC`8iOU;F|nI8;}B8Mm|Zjqt9yCzvTs#KCz z1nCd0j;~f;Lt}|W<_yKmoM4$Cic@L~FxXh9iM``_BLIuP-SE|A^xCzJ-QNmjGXHy2 z`}qnAq#DCxXM(yo7Zy4o*?`jy?#L_qEVOEo=E1}k(?-FKT_8PpVHsC8J=ogN!%^$Zz98Fix9jrFz-vE#O$L9S=MGm_ z8_;9PM76wFbV@`m5=j|3kOxkyeEglW4|BYy74=p5#x!r4#AnCRd-Vs^NF`@zy^oGz zpMKQ5v+=B?e(rmuey4FU+=T zvTE@-(-_ki2C+m>_FljIYKgPbI6)UzcChGnk*NWREqX-~_w-3%6+kx6%R7i&^OU*F zevzEuxN~l%l-*4k^u~XpR{3LaAUryGRJO4`RCX0VwkUiM-;W*t)*&t9za0l2uyIdZ z{it`6=mvSXJkeuo9HFT7)(iqKI@YgdTE6x?ZJ=Y z$@$fZTrA#)T@isBvSS4OHIeO`a@Q8zTuTmTWp$L1-)D76?xWghC|8NwaQT9%9bH3> z*qm8qlqA1z6TLRZvmkpJ6)Neau|YNB@5%cP*9|BI+zpJ#8%ycXefV(rr$>hep|onv zA{1UKAU@J*A` z7b^!LLEQz;iArs3Ey}gqaGVK(a65>jH1D*wNL3keTV1Vh>x*)Ol8T8ep4ZoJ8Milz zBxn2mLjZ%9IO|L%;;0;MLss-yXyJ|i&ic{6hU{Q)WJK7K!nz<-Rw(pc(uT8s+R(%hwPZe~KpW7iy zyu{O)kK9vH0&Iwvq!f@wO1eQ%x=T{JyOHiL0g)6W zHr=u5?(W)@bcetO>F#>Vd+xdCy>rhSgW*61?6uY(bIpIwFFdXeiAS1~J(1x|lV8a` z^kLge8^EtWk54W9tH|jLd%1&uS+_X~Qa)Pn^EO0z0WJ|bhFg`Qk3(0+Fo=HU2QQfY zGo@F<`zI0Y*s+154)l6DWsQ|?p|{#PWuC&@>^4d zx`6Y(v+PQRFB8M#arAdYT81vN&pRgIj5_P_IkKQn#j@}Cnb~lNBZE9b8TD!^HlhKA zyI<~nsf7N47DyyZsoupu@$WZ;*3kR)JI^iMsHhoVcg!!{X*g5zg+|-akaOBlyloP^v>{BVfRO6uzsE z7^WoasU>GWzH;8(%SR=QfW-5g;UOmS1*aIeECY+WYgSlcL<6iT>-cg|GM6|I=5156 zK_|jHo5cqWlS9PCjGy?V0UW#zeU-N^0owk&KfI;|&^YX7FvPu(uy?N0zvx$ogzXp) zuq7N;;Iv(O^V{`D$Bf$6#4JGlXq|-OJt-|zc85AWnuN|Dc+B7jy-*N@P%sE zhP`*vcE>I*xk4p{+bhcjlG+$vdVFknq;2@Tiv}oxUq{?avwEDG+}s`Qs4di3suq!n zirgz`M2}+wh^@GY`*j)J1;7RhTEFW14IBf9XY;iw@DUOy-nexg#7Vsw*kEPTq!P$rA 
zA-g=YQ|-W&jHL#xG#7x|#6bAPWSi*Qohv=GrY*#7VeP92vdKNlgXX(QvYG@U(e511 zJfKjwg>kEk@F*K0FnIS1X_)b)P!og8*2#a7->bnh2cl z%k)!t;ROX5<)=mytmmYRJZn8DGxhlB`;hU00-k5!0MCvX)G){ZzMCNqioT6P zt}tu(<8xG7Z!qms`ODpzA8llAD!gCIbBbL4kPpxf#Eo+Q@JAg;Owixj zGC4B(@u+2;EpMP_v0mY85KYByN0>7P-zNiz7*MgRZ6~4?J^tW({G(i_N-9I3)O0#% z=TjSFt&ZFa$9}yPDX8*jO3)Ot=`!440a#uPw06*5`AKxKb-GvM7(pp&z$(8O$G@<9 zy4XO(iSOPR`a7Y6=@vNn_lJgP9N0TU#@8}BG<%^v_O>diI zNWEv?%QO_|0oVFdDqpt}*sym&i@it^sQ5WQ1+m>75u9BVg|(f&?a$8kr9Mh*BhJDN ze+#KWv5dyJ?j6c<7cmU?a;ea&=+R&nH`D%I(`LCpB4}-*$Ay^#%BYO`jcT(9lZnB; z99k=0cT~D2qeSyAPbU&&#N#~47VZOxsHI;xM@D4XyRVJmjSH8P+C%#?ylMFRi~ohNPc@2T0Y z9t$pJ8O7KR05l;lXmbEGS(Vi+2~>U~u4VG-0KZUHbh6%heqSWhr8r)x-d5K%DSntL zo%fGM)K%0ouo8YJ`o_R@X9N3G6D<0%=+rv=p#X0xQQwBmJMiAmZ|9f9&HUbYS&`SQ zKAM_4LNWQ0xKrce->CUPBpXBWPO0bcKzPb4Lj<6 zVx*gvCWcUE$G4pr7CT8|!z`pAaTASHYqZv@itEp@F^6BYa7Oh*!)|Z1FCSzP+hZzT z1lD-hL=P2}fc2scLyb=z7rI{8ik)=b27M(4}O#os+I8xWnDPC$AS5>fPHV1Hyk$`)VMa(LLGPfCq ziAkYSp2QSh1PYz*3+ z`k}EUdey=!cLoLbC$lf`w(I3AUYQeZc1eWk(ghkHUl^*T^|a{@!;#-35$sXjIb0uj z931%H8fVNH$ZzCcSCZBTe~Yos(h?poTVu7DDP6TLwT~B6xiO3szpMl7sDhS(N}$Qm zOd9iich>!N#*=+0stL`UYnNcShr3RlQLRL@(>Bv(h*MM|k8xSJA0Cke>9xbXSQ<9^ z?&EcjuprxxZ}M?|`ob^0nJ=sPpYj;i&v>{JUFAuKecwDVwA?!d3lxYd@;Rv`mJ{B2 zwBK`RYKxBOS4@!{uzQR0ONyp~SvT628s;zRlV&5P0QbHO2@mPYmz~`_T<{E?T3p;9 z^Aa0-#@ghS9cBe@(MHU4_zCsn6hv+%7u>Vb+S;yGKumX@KH({A*;HXn8U4>Pi^Vvv(EjM7aNB9 zJ4XXiUn86gm57iVXGgsh<(eqbmRV=fn$3pm^1Iz8(Ti3^GB>}4OyGJ5Ro+hjB87fy zBZV|Nm7x-?H5bVKg2V&OOVLxLaLo&R6vup3>8uitnak+M80wfl0XHPGF!~beZ-VSz z$&2;fVE=^;f=jv2DC|ep_dEf@ZtFc#YFe`D?_t{w_NL;7j^XuV0MW%bhmAhe`wS+% z77eED!<(QgH7G(cr*Ya>@7WUM&=h{JI@Mu+xI337!1`qn5PL3h_j)F%He05db6@;G zO(Ee+)_#8)UZPRiRSb(^p~xYCh)(iD6EJ6!Q#hQ}_hSNHlg=g8t+H4G?#G#<+pB(0 z?26K&>9V+t8krkz`*T?viv8S{4hYK3D*}d{LA~J#10k;{oafb^XZZtS6howb_hkCM zH@~%)dy9&_c&;7*wdp}Xs+Mup=PIp4vsKH9tGFzKj!=8!(@d)#jog=#59VV|XImpP zjR1+Sg0!mZ7jLn8X$(*l7*N;u$y+?KVW+)fW6^IbIfEGj`umW%N@JJyMqBOPUityt zuKU!G*(f(dTxY``!w8C;)b4pYpiD5%xuA${bKovMXf{Mg(B?{-(oMSnxnD 
zYVF+xpN>k{pzMEUfJ73oHTu+_j0OvMvaddnH@9%j*iB{Dr8|bP7Cv>zmm<>gS+i)j zeE`ZOxo5qQluI0zlOI9Bas#XHwvb6`nDt%|NVw!C_-O=Tqtjwsr!5qKSARA?3k&vB zF9B5wez~R%>KY%k9G0mQ7U#558dI`1YMQSXl>NbwI+9o`1gP0ir__J-c6%P{A-0gj zX&2JlIjVcN*-Yv;jm~GU@@fiP2h3cUF= z2Ca5G(M6yE3Q?P&Lu>vc|E%YmfDKCx4zYOn#r7@+QuGk^>9El6g@ARQ#F0B!u#Qq2 z%6I}E9ToiHP^l#(R?tWdRW~ngt--7zp#urMh}4&Fq6zO|kb!=8Td&N=^W7~EBWw9} zRtek9CQ=$~y~z&B0q@uY*-|d5NeaCxid8LSaMElr6f6H?M9A{ zb(ZSAVjM44J@PUCz^~dH?{4vTdC#jA7d$`Bw_E98^IuHSL(IHM9((JP$ux|z;LeW( z^M}vBPKvWW&WEsS%_H>^_nCW3Bwl)qbhy5mdwfB24xO&iA4qmdfqxYKDA+ys&i>dw zyt%Z&KCpA8_;(}z>^b7Oc=c<>+fh5L{N1}}$Q7Qox)B%n*s<4T-5%ifxykwNUkPTt z2DB_R58S#V-%(D&zj=R%BCxyp_^zE|e;e_&T!C6?kI&~-l$^e=&~Kh)s{21>FqDeK zehO8)%0_{lMjp=3%KdhZFd_uWu5QsDmV+L@J?`0nl=~wdLLz?6ZI^~b5U|nemkgf& z#IbC;9yjhUy?f=eWmUTdNA(t#tQ`}MH@MISp2K#jUlOdOp+QQf5k1rOFatUlP*ODdql96#eQ=ejI9+AozH%eC)r!QmefB z!|#DzyiEaxN)mu7<^d9lJl2oGKWA#K+%BMXt74jq0E&(QI+~!QlxCg)$Q!AleQW`6#r$SL<_&sH>J_m$G((H+U3V>eRtu0m%}Nk6|W;* z(@1Y!&&yrYygT8W$m(jFh1|-zPoe=pxp5GcrG*w1kEv=RMd;RfXEVj_11jUyk}04I zK9M6K&+R-n9aryKs&(3fMcg}uylt~mqxhs?Up@=akBjdg8_HJBYL(bV)gVHWdupY* z06F#dGx*PzYNPjCDSXHy=J$+CUWp#1yfQesswrf%UlY=-bOeE(`=@d{(wc7A(PWN-LYD%vyDyA^XP>bpX&R0ZgShL=p^iMJser6c|HeV6h?S3wBg4T5A{;Ywd-3K z^@24dsryVmbuwN!q+=vde4wfSJ>BvdshE3sWRwR+H}Yi#KOAk@ZOhchJN52H_wGp2 zs%v2@c){bv8cQ?R3*2}WDwQNc&{5ritL5!>IPDW}M_a%Ty&dKeudchh0tFc7U_l`@SP!r> zp%}8<-Tg=5)np9b#~oAZY4py{dE<70~6}XMM4*=<}oh0KMr~pyYPcRKaD)eY0bl9FO(& zrx8TnEs|5$Bab28s=aMJG?LSC0osv#dIkpXA1t0%-V6MN8@^C!Xbl@8Qthe zvgNowWdSv|tgnj+$cwwoq;NWon9wczqWu6Z2Y?0TN?c)(+CeY&d{l%*fbT z($k(5W_=k><-PBdX`kiond-zax@}u=MGy9s6AU&yo z?Lo;oguEvTG&<}?M~x~!HZ0-7gmFL0i2|-#XD##*84&i%Fml> zi>2>4m`dn;FG=+jwpt#g<>@rZdrlc!!i;fv_@)+CJCg6Eu^AZon*=OOU#UM_SIYnb zHJjqlIKoHiH(SCl1?^e+je%+fF)3QJn~$_Xk_r6kNEeI5iPHNJTsx+)0LS!8f&9)rv_R-h8?V#@q z)J4;G=N6i%!AI*bEdX3hBOJ_i@nmoZp#VX4TPni}nhp4R0tNdJnm+@6L}}A;KdRJf z8dr`pz97K!6v&$(_eCd6``Cev7VhwFb+&01e&OIR4+UcWh8XZ2YEPYv2((g0*=Xh6 zHIBLNWHOx$KlpE4%k7qxiB@I@Eydm~q1Op+cwK|=nhSgJQ8aE+1Yj=tnY?Svm3Tkcr+J*$qzkw%VU@ 
zI3<_NQDn6|2^OS)^QQ5;e%xbjXX+Z_YN;gUTTxhb2twJatzt7>*V|vt2^TVaJ&!qF zIi~Jou%q5;NoCsds*X0KHR_rtp`Q=AM{%obbcoFZ{F0|n{S{c0-04d+E@`5Y5vnQj zdS#US?t6lOG6|E6kR}j=ey36h|9EAN_i^W4Kw*;2Vr_jGA@3Gpq3)upaHT{k(n5ve z+%APeU+c-O6(nC=i{HobYqE&N=0W|6N?3+D!i>FonNGA^T63c4>F}hyOsN)`pl<}# zf_a9|p;6Z5c81@t^NJG7`6|#5EXs2F9z6~Sff!61`GCuS8+7gcS#@?e>D?4Wn~ZZ| zBdWfVaZ2_l6NpL+tv28XOS^0Z1Z>|=%RLiQG~h;LLs>aw3|WvJ&#gXkf`rwHt6 zF1a$T>SfpJ6Zz;T^*`D~B{WOHZm%sSG&Eb-KV{0MvJ@27=jbw`M?c54l!~RkD8OT! zvYaU~tQud`gH(;$W^tGd^z*Ddyv|&BkW@pwrybBu4i zGO$kNTJz(4?a-PqDV?b`Au*1ues^ej+k!+U##d}+6%DXFdbN-!Z&(6SwUPJPH_lD& z=>%Fz3@jw~yV1&i-k^`EK6@mt3$qSctGY-VV~;-I4V4yM4{R(h1Z~6`M7tbAsqb$M zMcBdMT9VPgK`aQ2te{BSe3dCK?czPo6pU}k@sDRR2=~uk%D)xgJuj?|d9R5nl|=60 zKBya+a;hK;Sd&vs*(O9#SRkq~RIGQLQ}9h8GIZ8q*&>t`tiGynKcgb#umzQi1`fnTxlHH7F3+*a=t35{-aIK1Eu_`zi&XIh0}L(*$zLq+lN= zx?9fKZk|`aywpRGKmQmJc%1U-KI^MGVw@WQ?S06m{C;aJ&9i_;3Z|QWQT&2n;_ICf zRZEjQFIwv}MzRyaXEXEejaYh5r|}8xCmQUywkQf5x9^DVC4vF4(MNS$^GBp}0`=(& z`h307Ts32>=NBoXJK~=OJ(Av6ko446_+=h{%@Dx)H&?^bFb=*+C+^$Hi=Ab`t=r_= zB%e5|F11H6TV1J?ad=h0<0%`>dmEL0cq*gSR5cECbA#S6P&_OO{A*6zTw%c3A z0Rci1k4F9#LEL8y+Izr!g&xr2ow_UFT?lUC8jKtClLCNK7i6?i`(eEY zi$2-~08>|MmSB?U+Zr_+iZIr8ThOV+L~f{bXhJs+e1@f~DFEj_2jp^^H2N6;jOfCT zsVb4c^7{OAPier;0c0_fB1oTsMrgJiaj`p{P&7?`DBo2i`N`l6JiVnord7-BAOccl;kLtu-jY~fNaOoTh+J{+rGwj z_Rvnb(iV5;NJL84?aw>69oL)V(NfR>^-6R^nEN-KfoKBJWD1V(d&78Y1bs>)t4(V* z@3A>LCEr33lp`m(-QV6?oytCll1w%%A)pH(q6b|QGmvDee)ZA=c1IY4cfczR7;HKs zqe9%_o5St3V6HgAKBc|~ca;N)T)!Z7nv$=my?DSiD%^qM!D*s|>%-DwNF^-8!*hn8 zbe@4Dc*+3zUxj=^J)lvd6J7ZsTc)O*JMR~g*o)X<%xnB_N8o{$zRpYY>wGqWB3O&& z@*3c1w?;6`1-zm<^tiGfHK1%wxA<}Noi!tP+sBc@$i&lOpYnOcocEm|lRZBP_*{zM zA@1a-ZK$#<=tDD;TMvd79+Wjs$x3kq{xQWOK`5Shg6sRfJ((DRrb)Up<>)8+{tYx4 z?p;b?V0~WucSYOmy!=xO05!w)U}2Z^$CgIL;z#KuWB9hZg?jYrY*BAObTrqy(UCf| zE~MJk8b*wTZksYVuTFUO7t?Ik^$}2_^*>0g_5Q3Q3Q5DH!{I%V8#>8tP#wH*Rxc=u4k)zzhZQ0;g_#+o_1&8lI>Pw; zGh>m8Z%^+wsFM2KJd0L?WHSHj>^c9Pvt4$wFk&JW&8ja_W;WGkLuQ&$n$fRb%;7L7 zgb1H(Gxvt!)Zz-fh0t8R2i%4oqZ}PH>`J!zFvZ`J120Vy?vCA2=4q>$sw2-Y@?Xl* 
zn?7p#&gY2)onGn^m#LK&X;BSKV-8U$etxw#8_fOv>$G~|gsJzIy)<`>OAKJ2>m`=J zq}_Kq#;OpIJ#FOShRciqvWr zbJgwMHFmEefx8UF>kRV8l3TKk!*ACEr~7FCA|D-RVjfRL@D(tHZh0=|d)+F(SNqmu znPwk?PHJ?FXy#o4s8>y7Fp{T&#X{FD2e&Yu%c)zutCK_j3mUR*Z+QzJUND8Sdw; z*YB+r^h>vP7A@Z6ehLW@t10g!ebZocp+0a59Wv>U{4Vh2;loQpOT>62%Az0>fM47CHsl!e=T5u#Rrh&{E3TIl5vO} zc)Y_5K_A>YYZPcFA`BopfH3j7ttq>>^TO(+3%=Y&hWDvdtgIcdny8KYBd34nO2rN) z===V3T-rB~&S(b^dlpGt7BO;duFIY_Hh_pnVzola8zZ+VeV){vkjHpJ(9V9_Qsz0G zZc|P8DofnUAOa%w*QT%6*PMo#i`7a`6$(a@-p!`>pcvEV>H^y3Z(f3o29Y4$0kCpP zP0n;yMQ_!X9JE3JWv%dqB)XLqiLYY0<(=f&Hj5l=-aV4KAOuw0fhHHjNwpNtE9VGe z0v9uqKc=${*#KH=fTw<(_U=z;#VG*;TCxAAI)sA8KN~fi2j8EiH3$_C0u-yKlr9rC5o$$>IV<(y zbgtU-9}Z%iH~PL=_D-ED71+NjR4Z+Ad_2-YOo;88_m3Hu%=CG=&0bf!cGI8Xr|G-! zR<+J-f*RsekxT)WwzbyOybpgj%fj+LfG{Qo{zmR}QtLD36j>O(C zcFUkafXclGq9^q#$pd8m?W(-SNPJ4htjY%g0O>{Ck49-j#Gr&|@WNe=asV5P#{_^gX*X2k(2NvbhGw@AaKvr z<|9fBRXDM&URQ%eroEpwV@@-GL%{%~lA1ZuM(p-AMAWZ4EZ3~4sIWW7@;H`v)UZML)mk~579^hY95=xq`vl*2HHhn52F*< zW+kvwetxb)Y#*}+-=n)#f{!dqkBXe^{5e%(8(^~~0pRIUB8Zew1X8WAy+T-W7?VFl z5*32vsC8vmTz99W^lCJN{QHkzQN+D_aENDnd2uvdov%`$F>VQAC~skO*_*1o?DV;f zNSe7Qy$^8#`v7si>a-Ry^BED3bAj4Y(zRx_`Opu;3`_C+hXudK z`Fx{{7^?}yrf-t^?@ViL4;o|4&$gnHyO;ifO4}hS4zN(4v0Ra-$iGiT$=`3w6$`~= zmag8JG}jme8Y`57PkegGO?1QGGu&lb(_9q2i z1=q!^D=Yub4FCRh{1!9hEF-4CwxOJSU+&`72QBZzmDb|=db?HJKZxjTVlFO6GLHc6 z8cU-+s}+DxDHXd?A=i_vIiGIQL7fhgO$NS?D{^lz!~$M$gUHZ=q9EfK-{Lfn z+Zp&A`+#MW9-1zFO%B+l-uk>mrRu0p zi6&WY9U)j0Iqq*y*;N5His2M_IorUzU++96FwV(M7sRT5+9t3t^}4eEc{G^SHj)~ z_yOmKq?-uBuS4eRgFkxk_%8~2dDe?!zS1i*V?5+`5?ny)ARk`t^mPx)QSLaU)$k{+ zOyzcZ?&TT5ht5fo2;WVW==2Omdcb2XmZAN>p6pLHqWc`u zkKl!F*WP(O%;VUOiM%KG+}{c2`Qh>+Mf$71FAQL*9KU^)8MM&tTsuGW8HrLVwa4^l zaD#*BmK9>$AY3ZG04gdTJtHt_->v7Xq{M4Qb|>ZCJk}oe`8yK6sM7p--Tx4$L&$z} zsmbsHrb9g7>D7v80pHCK51erD>sp&dwd1V+eoEjc4VY_j^6Dgikt+Z5aesc30etFN zXfWb`K%jrU82+G9cYpw z|G1ntW~Rsp4GGErCfEOYhks1QAE>1@lD`s|zdi6ZC6JBE^YKakzxP=NJgO74lm8!P z_m68GO99>J40reM|L=Y3AosW}L-oXv?g6-6qvHYib}2@?-fnQYnf&SDN?)vMv+<9W 
z0E2nVak#K?V92hthbfy%P*+KcYu;sOG2&1yPEGB7w8GC2asMiVSN-ytcGu?LRuAj% zA&<54F`PXzBl(s5daHza!X4u*Cl!vj20>9lw<> zn7JDM)J6I4YqkB3V1F^6{(MdR6=6RAR=Uu!w-5g3AqFnzjRiz7PEr!8{|elHj>+%y zR^!?l+W)hL65%lcZ6CK@IH^51FuI}${S--d_z3tPgKCc+ES)m-RkMSC?gj5;FfAF5 zCHi=;+5cHJ{}}=!7NGH4b+g+sXtXptnOQdRz089sicwiKzjTz7)E&h!;SoF zF#k2Y?wA1TGU>gof=nb4KQoPdS~Ng;-4z3ByqFqb)3Ny8Gs-@ASzx<W?n5HI#OWDP+^WcX`2p0_{Vz{WBwgp6U>DgbsHTdIIMXO zXSstXsBi!rvyNJE9Z`ZdCrE@T@JR+CIK=`3Sx&FTR(H{pDrYv;b+WU%%JYaockhE~ zl}z3H!GKRN@}aV@oOUa59%s0N6L~|>UzNs=`*RLbu{^aVW9hnKcr4^m0t;enX1St2 z224g<{xggJ{^kw+@7Vx3!wv8nW?#n)24m8S?)JcJ_T{3#^Y2}Zb6ILhdo3KAYEh`r zJeFn%c&M>mZx}irEVv{j8$1;#m6Yn#YPCJsrt+=avhFN52LqZ!9aPJKRn>N@FhFt| z_6ae+H%v^kmeU5Mau@KX%G(@Gdk29S_WfM9Fs) zvOPwtIa#n%BCxpxN~y9Ov)~3pWQrJX!c1hS5URiGlT^Xy*aVLa^}%gZYOcd8y_G@f zrEQIxXdy+Zdt7mR@ns|How~_lG%8PVmB)ls@+PwBWanAchmQxcSqsyZo&wyyKa*{@ zU8nUZo(}ow*6;-NbPCnUi$k|KUL$p|jg-k+jb|qTjia>WEdp0!gFFtWEh2);9VFcx zmp#sb?_?Q+9sWn+qAxY!|2}Bng(~2^t%L$bK-=Qn{-(Tg((Mimwe4@Ox-8?y=pGj1 zr=_v)Dx;`N#&?{|3T)o5Ea^q70+ z;)dKS4lRKxU30}wU5*YH%Y(=%EJX4l?Nn4JlPF7ph}EHW6z$;Ab(DkK<1}m~xSfl3 zYKtLghclLQhwJPp3ZYq6VfUq#X%Gdk{q2Oy4!dRH6ho|N(<}>-%4LoUsbL{!hg!h8Ra@U*gfK`;sp_rwoXEQV8t}-I zMXfeEmHkOnt(vy2$L!s1`0Cj})MP#6fcGj_0#5x&3XZxoHmDe_(Zo{FZfBvJO-R*Q zDEO4w!dJg-teEX3vf%4>X*v4hlF?d}rqtlH@C4fdE8_&O9txx7cq}&ZUbS|0n3r{r zkwL`+2HeT_Umx2>jcnH|zLC!UGzzAc3>Rn0R&XV0&c*glfNpCp#z8mO+vk(%Tc7T< zlG;x*SC-v&tKz&ma4ePc{bhNib-Y*kv5O_H=CuL`k>iWq@=FvgZd3VY)jxA z4+1nGB%7QqprJGigTUe5WTS7S%J$&{xASqurqW|i7%m;-x7X8?zQg-Bh?s3YcXcIt zt!err-5`HuQ?;x7r>++Y8feWWm&#YQB z;De?Z{-r@HXhl`}67cvL1dnJ#2cnG~cP6L?3ns7`)KmamF%tkluN;+l7S1J`{C=Ja zZN?j%dW{UsV)~e7|I1>N!5B#Om}}ko0Za|5vg7zypk_Ki5v5W?I%8O&QEisCzfi00 zdbm)kX)+q87>3J44piR1!x!oN@9IAAj_T-^`YaE}y4kea;G!;{b*`4iSQsnKm-n`_ zrY%8&r8cIvOj$uo4Z=jAL>&)mmLJ+y=74as2VtRU=K5i(d}n}8o$?|?5=)(o7X*++U zPv;pu7U=U=C7;5~eYjwcL#<0FUydj9y@mI_x$`qE)k?#eP}8gj4eb?qk0X*)_yNuZ z)s{=e>WSI?Va`Y@#cF_ka!Dp0-JIWACXqtM5@aFjG`+|eH zbL$n}K3vSIB%Sx`pNWy_hm!B^Oazc13(dQqWmeiN|WxPi*i#h 
zMZh@T`{{w}1`tS$0m@I-7Mg@4ch&Fw;2tc;wsEocZCUFtZCTLI0pAD%fq>TZ z^b4&rxD?rI?kiiV#r*f_`?dU+xE6PEdJ|zhvE!ddNuAsO9XBqe%v9=ddj z()Ww6W+J;GESl~pwFY~3yK8MP<8$~kN;H)qA!VRir3n(h_UPN9d7ZCA%8URv2P&6c6<>9{ z8BL7~tSavtX}qB1L6hy#GEy4x~xao<8AKfDhHo7au{0-h~7CsCCVcB{YAOvZh1}easr^A#>)M* zgt`V-Q~5_8PzZzY<17{8`1u{9FGIK53=qovSWGP%)tPW%cE+o6olD}Md{udshWK4} z<3Jgc??v`9MKeVJIR1Vpmf=-24#DfsE^4dw`gj_Cy^7Kd1Gg@O&|1!0OR6933~IYO znEfgC{2cRlE_C}HF3y|FV zkG9BXw9U|GP><@-F=Z@kq;oD-6Mvv5J$0<9c3;C3?C3- zT7RaB47Old#I04VS|UHMV`apys;CnE9{WI0G3N#BbBQ={^39w6A;}bohcg$)J+BwX zuwYj6QN{6s`4#ckorIOm#xmVpi^;Fd!9#IOd2t51(8;k6yVpKPln|3yXXErdwvOQSarhQQK-7z3P4?IhP2NjV$C*P zE26SQZETloNhwjkRP54tHBN*Ceq1TSe-*iM8c}#s!6Adek{*#-qnF9$<#{b~19p3P za8$a87Y;2sJwx=U_2ITq>~w37DzrIQD+kCX8o^3Ix38hJ#c6Kb2l7wH z1c#<~bE5!>Z>ruEYJCrQ&le2VB!a$5a%nt;b@?WHbqAgc1%umwTfmigmKxX@If>7s zsH{(+_H)-n9z$F}WoUAMmaOJ973=iI-}-QCWB?Fh0nYU#d|NID$Fks3joqgzU=by< zB4DA0#aiprPwPU8ydbL6-08>h@0ws0sf(iQN0Sya`MH|fZBmv}@MWW%RV~AltnVNe zO`L2}%+O%0x=F%mrOVDO6T9vY7n}*We3qdF1wpIc>6#xRniTnIU^FvA*@uxvxkUqR z9w=0v=(hVIZVR%DIT?;vtZFcphN{fmItzV;JQcP2U@h^+G+!p6PulmV$Uu!619@r( zh`~_0;(RBK_@JAhoinstZS19*ecR=wbtiDVl$ZH3@16X`^$COjf27)=&nZfTghbN^x0|X z;(2>u5sTd;(@3j4v!vX%5BWW|^V!+%uP$z#KIVneBLnB)T`WckyBqG_{%}H9?IW(& zTVq3XHqJflbg`lWhkB|gn;**sk^0q4U%M};*Dke~Y72z`R4mgx`3$mFoXE&$U=uX_ z7kYX0RCm5}7PC3Dy}N1$I3=BqBp&4&_vc?omyD~@#Q&{Mh*6F7LxjmpH_F~h zxmg4EhRJz>hOwbS$q367A&$({NVD-SPWH*X93*k|LA=CX46b(S{Rnbau{*PtkEC9M zr#4T4ICe}?FQzMTGo>KQ5}kstA!w`$p$i)Z2i2?sR}%m63fBXyN5($=u5n+@x3(?4 zu~*Bxqt#c}5-%=~FmjiVhGmIqn|U?Pld5_!&x+IZu+KPt0;BkI{3x+nx|hJKWprRr zAwYIkf1kP@ZYFOk{?ufjaC$@;!~IdIR?E_rL7)i^D1l&llRWolLy^w# zNd0Xq;OcPx>n3{|+um}#B?+5Z2aq;P;&G8|e>k6vX4{DxRBI|gnJa}!gFFidum?N6 zq=3pH^Wv2A)W?+iPkT*!Sqz_zrYE8V_GzF)!KI_Sad;blt9BXvmUShmuu6Wf2zzrs z4>?VB+>b;+xh$fKrkL*6)w5OqV2M4rPC|U{Th)VXa_yuFj|Npd{A$zBB%Z{ttkKJ_ zou7%Ls!X@;o7nh~q0bf2$FeJk(jA_X^CIeMe3$#(aJgvUj6xKg!KCRVd0Z(Yk~h3J zTN%*7si`7WadD-VVZiMKapxnb@~T~B^I`mE5VDKr~B#%6!vy0HnRUy@#!IfHFWAWl20KSh^GuWddibb 
z;fsz?nB1aI7~LA5sj;m6G0PJd8;_%#Pe*153w=WpBT1(o5ba#F6un$51!7B&ms6Ol zQJgW)@l=adk?Kw@+*UwE_kOa}Z7uL`m7S_~nlzfo(=%jv-B!1+B(0h5Gm(?57t%=* zWq%?c!C)VUn2BewWUPp;yko|7uwM*;`qg@hOG;Z5T#chIRY;?|90@XcC)BNLmN zw+l=7-6(uAyNwAk{^gGSeEZYq+9T}G7c&xp0+ndBR#y!XP>%ooOavg=M4KKpG`^PG!Como`e~STn~0551O5}Ba@YyhNZWyE@b02KC+o3 zSASiqHqS7w!eLm$8qqXJ<8<26(X6*^l?b4eijhnhmDe~SSVwF5I;6mm-VOT}%$uLLuL@9oMFCq*BN$ z-;h##9P-rPsZ{chP}=E@aNIh5`5={x9mPGF|J~hTBXLr9CG%tDQZk-3b?eOx!RI>2nNPt(uA&yVVcA59~eJdyTfO(7>zJ zXnIwWlY)-%oPKy^#CkjQ$~8z5MKj-p$^TII)^Ab0?b^5?NO!lWfaDMY(h^Dw(g+Ma zNOv=gbc1wBN_T^#lt?4p-Q5F2z02qE^X&cI-*^83?{9O=K^$wX`@Yw8o!1$jQBEHy zd;*YXYmaimK0D7JwMA&QE_d8At^z-7c#kWez;^f{?UlJip4I9jVxX4Za+Fogd@~Jo zUG>i=nbq5%I*C;hbrt9_M+@WracZ@erwA?`t}-38NE7tx(&RpmjbdA}QXY-CyOg#( zL1i`k^oT9p?bN&lZDVFw49awfB~C5mGA~%FRrxFRqpfM*{}{-;3IAh765b}&${Nh< zkIEAy@(b~v#$Eb@jpzl2RE39`{l)`3xomly-POEOh|FEfgb+DU5juqSWD|dnLyjT5 zLunM5ddtU}p<}m=XIUg#elan0Z#hvyPr+lVeE%3!Y&@q+ETjy%I9k#qWQtsDO$RER zLXFxgHoeR8vOUXkcsxXrNp_otDp-LOygS zmw((MmG^~q)r--B4^US3(vK4;rf&clBv3*+sj6!3^#i?a{4<;C{prfFQ%AEjV#-Hh zEb*+xjkl~*6V9G{2i~JO_K)>o6J+_JGhc+Dd@n`6d&^07t-n)CADS1*Ncn zrBlRarL?2ZHl#DDlhglex^;*K9KDBV&I!QO2Y(Oa4KtrZm-6*-r;^FN@7uxvfu*gK4z03yriK8^cHr=FWk6KpN@tr(;U1AvO}r6bKz_gYp%k-^>*C#;i(33W zwU?eNekYZrir;Dme)Zk10*QAk@z+%r7as+@wu8S4JIgUYCc^kxwb~tqk>`@AGFhUv z?6)enyjyG8_sq3nzR`J3@VJ@MwQ4%ASO-nwu@S|Z2xhS zKevPYi)>Dh^m0Pe|O*X0l9mAD6L7c2H@XHIcyD?9=Ps~e*hJ6 zI;_7;tdfg+$uSH+qz6Fpy3t0o3iS5v56?|+m55AcYbXwb7y#-dpak40RWEM*_!cg9 zcOpY9`Ftot2z0gyaw?>Wi-YT2(!G8Oh&t+uqGj0Gd`JxF)K2n6$T)Os;>`2X!btCk z)<`UlqhBLZk2ZqnV0;sGbrnAYRJ_Bg1_C;o6RKVwz!$0dLSH4v2O>p>k%4-{lO1)H zGsV2PPKnAfWv;$EwYdtp?k>mtw@Gz&;v4D<&SgZO2xcE#y$c}ul@Jo};W1sc2Ob0R z0MwJ|9wz4$Yz@Q>!L%o57hLJ*dGC?*30o{MzCuPB&A~idAoH>E6(w1t$irF9B4Sm7 z41N8eDAL^K>|il)vB$sO6+&e3aZ?*GtT z$Ypl*W16tLO0`V%vw-!1WXb9`ANA<~>XmHbb5A}vqtuOP$$_Br&Y{ZT2tA!#1f{y} zYPRRr1^;5~+pveq(12|B(XNxpdq2P{Km^own!u*h+F=21gD=M9DN<_PaIrti9oDgh zYbInZk}=ws^OS9jD2b}U-QU9{@eb|*GxYV!g!H}&(el=()^q*}iLOCBlI(G(u*;#x 
z?=7-%*2>SpIdbJHxy*_l(kQGivx!l~-w=?9qeeeNfBEv6))y8PDK&gz67dg6Sm?;l zXhg+5j_y*;4jB_osy&-rJ?CkKL2V4Ny(Zk(W-%?oH$y^?pWr&1z+P=U29(JS&!brX zzHz@Kp}6eNGhEFr-TTxx_Eny$b;xDB_SW*;URyw?3%3w=hxarqWnvGM%#Cd(HGbX` z^K_qL{1ewI#JY?~Q{2;;+sT4t8TkG=)(8_G1v+jFQW0^A8Ag%9XX*V(ICN!Tt14JS zJavg?=%4Hfe(xXnkcYVZgd}sDs&yFreC;0NgDSj)Ny=t`@u`v~YEJg47BZk^NC3d2 zzB=o<_yTQ7cc<&0OxSg0{v2r04G*l65`<$Tc@NA-ztKL{CsNQU#bv+6Vshe%b-J?+ z_tGgUSUIZ{5sN*StvdO8k#;x)nVBv)dJ5_P89%%krT?Oc(JyN{y~tE8mPpcefB3k* zhO!L0m8O^rgqB&TwZFaA>sC9X#1(WH7vi?DQp0|mA(v!G@R@AZAFt;l7kw#?!hh!N zQq)M876h)c(@+17&V$o_0JahvskUv2K$=+}%wiQ_8H41aJC51d3Vb^fV^+IEe}ZK{ z6OuRHj1BjjZuCu9pdX9!s$JL6c7%~T0HdB*VT)5Vkf-Osji8j@jE1y37?AKtgmA&o zjr<*jD<6I|kdB^rhRdqplw1O{eTS<5;QCH!$pBBd}MUvv}j@w6? zZAsSBgdQfat003cKtO1W3QMa_NK7|Fi{&quf~_@(dh0E_iazQYoZD506Y0(2{C6i$ z%JK4VW@Q(%!%*x2#!%Up%L0LxVab;E)?uXV*^JbM&q&m$<^z|nk6Ogq)7VSLty-1f z+q>k1VC}(0&$&{Pjb7Ro>Wv4j?{alTmU=ZZ^l4GOjVYV?IEM-}3^j{9wvRauM zh4X^)QX4C8FPpk8o@~NOO2R34Wbz{qN}TzJi_~q@=Zi2IakT38`AHt1(urWBf=gXr zS#Q^iXatm@%nG>oIqh6&=}aXoYq}oYE4Av>NO6YkgL&C#xiIXZCsqvylbA7*ici=0 z_~{%M$+;hH=!(z31G0a$BK2aniSd%5>$4tgL+K?ceS|tj8Ng#7?dixNHl1i8{H4tA zW;jY4j-{qDJ3 zlJ+cmD&8B(; zw(u!mOw69u;tyyPl;FYw_*QK$T z8~8LfDhUGOx$o>f?dcOc&=$lwl7tzhv>Y#MO8?5~ZLRK|?dMedi%5sae)j!nG`dy! z&+S7&ez9Z5GvGvFd-i;T5!NJ>Q_c3EkN%T~!r#`k5sew9(6yadz^^H*u&m zp5+RgRZ4+VJqYiWDrxN7X|qqf*ILi$=;AOYJKDF_cB)M~x(eHARS?Z1ON2BX zP*FO?ROaJG2dd_vW%KpAAw3Uthy#v^KIoQX1kz$XAasP$oEC1lojF&eR<=ImP?9ej z`_5CVy}!@PC^Z=6ZlWKT`t|k<=mRNq%sPpS)CikVMds67cJr7KkK1mkb~T3q7aaPT z{`(P(Q(#&2G9s*1|CtM(dUdJ9wS#Z6#=?^<+p~nyLn})f-;-TlnD*WwivM>q@cz-z zrFMp(bKrBlY};WoeY5k~#3vW_=uc=^6h^u3TskM}51l<O#?zL79G>xj< zf$ON*);5sz_q#R$iM~Woad&nh-C&&kQKV}e#R)=(+urv`U>Zxj!EG~{X`38`S zs#CAu^IKw`+qKPVv>}U@Fo{K7js{lj2{)rLZABn-%w437+mFZm`M!>gWIfJFO6hi* zI(*2nZ4+N5MQckE(%0vQS-Uq3`t^ApyKelh?PKxXQ>lgk@k*7=@mru1VBrbyi94lT z{tg0M7$Iq5#+&&p94lJV$+eabxa5|<=j+vr7g@+L_eiAid^e4wYE`q%?P-W?ccpo# zd!`$DnPXau{NFc;%Xr=^Blm{ii{J7Hxa=n(s1skAkG%%JB4DEf6&!PL)oG&I;Qi#pX!kAux@Z;L3F!i? 
zi1AA9LGb8zibg1c2fbAurSnC;X*bpO#!JLq^KzfUKp97558Gp1Tb?4N-P~56Wll`s zJ*L02yOG=br=wVcMNw+)X5mW@%c=`wB8}SF*eIpA2Xh6os2W0$CEa>&Kiqdw)6VxL z5B(sSED6-}%~W+^AqBb5T2?MMu2VMh>lGQSed{O3n10ix`S!`#zu<6@;hb^fdiMaJT zm1M3#`z>M`f9hSa8xTjmu=NQmaPV^PrdfZ5VygO~za`f>Rhm6YT5UuhM?a0%vFaN~ zdHip=xQ-UL{VRI`m-B{o(3dH~F=g7yjd@>6l=U~4Z;Ed=qJ!v0-g5S0Ke02Gtx}`l`bl{1 z7Ibw%8#U>l{VDYfczn@6&MN+avRIJZ!k3M^2%GVZU0NQ-vE3i;>NN+w8v5w-vhEFn zS`1-sj59c3IaOWBx(XWWio}4cRVC%77LSSIXL-pk-9f`ocssM_QkHhU8WKMnqAJI; zM}J(2W9l?TG0-?QGWgTNMZKQhPB?IJ)_QD{U4yQ2>v@E>YD5yGZbF@hr4f~PAjV<< z+7eYTLrc^#TW4(@VxD&|Qe-=KLwS1Ck{`g3m+Wvn>YrpvW`+MrEUQ#5evXZ6kQ(a_ zY0>^Th9}VRb3>enCtvJFbyrDgO9t%S6KrY=Yrmur3~m;+mu4I8e{z9eT>ZeG!smH- zrZLlchlT>ymruOkucm6oe5~CW=uDHkS15}P+<4*ba6^@dcYUeOr^IIS)%}U~U*k@r zdcJ@MI6`G<_5sP2`AyOLBh~8o#+y{`OKZ&B?&sp@1C(Aub z-UHIN^TAz8j#Pn$vq46U^{E$ab^Kilwd zwa;I#=)x`J5$8Kl+?DRBy^kq0tm@|%FW9ILN+Ep>iN`Y4J(xFB8>Z_VN45(h3gdc1 zth6ap-`&!9c3D(%DkPPw!z>St11wt#1oIMs_U0GU&Gv6&Bvju0k^RVb7H%8d*WM?1 zllSBt9#_8zEEn{VSLYh+*Jvct+p5~vxB8MhKVVDxFC;L3#Lo^#G@= zXSxHHV@ac?O<&p^eL`5|-hz3u`2KqALocm7uq<7i5YYn3IR z%aUMuG>`XPlUn88_;aH3@BdZ;7>Yn8AVCDV`)9u}B0mSy9WduD{H}>km#u6F4dS(5 zeI;#hW`K8)Zi;_X3sk2?-P57F2f?YA zyZB4fPOl%{;q1wO;MGQMW?3LgKv8!m$1(-TPWgOYFVC6eW`TLP?nrCjtG~6O6w04A z(Vk)&{hz0)u)N-@3PhLbM@pcdiW*%#-N+?glf7t1i={F3)oVX~S1#-UBZI(<(`5>)KqxKa)1hfRh0%{ds`aQG zTB;qMP?vYxZI#5Ju4mLY%wW00D=_M54>te%^m(|_BA|IKlUlne^#DQyL5JGRr4inQ zx(_UtKh^mI+z;m!Eta33cUs2 zB!JDhA9$#rIczxK(s#F?U{`wzf+wDw82qy`l#qu6hS#(yT>m53k|9S1R|0Y4*4u8; zOHHmBQQM?emJTn)2Sw*-FH>LJ7ZGB8?G&iDUZPh#Y|u=p|HNQTyYq6PQ7peXF#*k! 
zeAuBkMj&XgooC=+!NleEpgtZTs6LLD9@?uiKhub1Us+ZyhBz0C#*z!UG+)+n@4aU9 z9sCB}^qbam!P@ZJznK}Uwd{8DOuju?W5R+M^VWI7VW++O!vjTNqv>xtkDQ2uJ24UO z{1DF2%Pxg*A~m{p{B=M~wvEBb8QtA~7xWA0qg>;H>eVhR;%VI5Oa2ryalpLdgVW5Z z{)Gwk_&y2Px!&4}a`B(L?yQBXsy<86!A$577c z&S{#UEq2l4rArEjE#(nTZs!=hYi73M`uTsG(>FN;$9La3MnC?#{14J0^Ir3_-BX{Z zpEI{nu{OyD68OEYQcs=*%n$|P(3wskeOQjJi|YSV2lWhdr1Duu?~I96N}-{M2eq89 zI!=n(%dYor2oWFeq(f0?6`gm-zsQmF?^*EM&afz5Zky5={f5h%wBIT*v|#$v;)*5} zj*FRI^;^*i{0AO!TK=jx!>u)=GJxx6H&ZrfaX{;`MBoo^iSf_zsU1`C3%1E`7I8l zuQ90gU45`%hw2D>(JPA*aq$@m)Vr3Jf!Sbrs4D=iQ%dL7M0^FR5L9Rh}{ zZ->LlaYgaHezx+c2)gVZ;%oi5U4V<8n?L6+Qu{t=cG+$)+*94LtpKxbuXqO^d8dlk z70QuD$6IWu_bg#1c%{5AR>?gqzk&1~8;SyDEt_ok0A+_rG*r)Z$sHsjG2UQltHh#5UnoZT)OUuo*dM47R|_d{g111Q-yr!775!@S+pjFg%Ddl$L(SBbJI|gPM0i1iCrD$_FTE7pVe&Pm z^9}r{Q@nh|_MW^Hua~<~NN9oP@&%8X0;UCdwmHbi9oP^+r!mwY*(ZOx!iR@aD^|9| zZYqBhcQEr!=f|FhW*B62lq3}gtm?Br1W~LBN^Rplj-$GqeIuJT>&mAAyJe}JhAd!71;zboUXV8q;i;3w|dqO$#L3#eZ+1U|Iekvkcr17 zOp+K)<5ZUduo5NbW4r%ZX5J`I3B8W*=R~=3mG=%Niu)QWZFec|_N8UxS&qZJLbvHc z2SjJ$m_{2apKn})MNnz&!5WQf80>`Dni4*qZdpR_<2;)*p};&NhF`ih&#$7X{^4i| zR$3xhYq@*&iULp6u8kO^FF?5)14l&;OI-L^=u60%{-xay!JPFXUMbpO zn2B72+*k14kIP|rVUhOzaltM^ipkx2o7Lmj*CwuZ>gR7mm(kdWnRR8tEMCJ;H*Dk$VUpe8WREQm&8`F=j>E1hq`}mvgEC1c1S!Q)?2DIaE-sKy|)*=n(!M=bA}D zf`B9B`qQpkdl!z4AO87C4Tg8Ok*hmwuOa9O&h}olK!D=DXM-ZEd+tM+=qjy$@un+Z zJijzfJoXOH>s^`nzWSqCtS%RkeN#ojq??v-%03ZC#ES#>EhVi^yvIaL;jxzowA|{B z|LD62q?ezNppR2vyh2JL-^Nb_^#CR#-`GSGa-%> zA8}~}7z$N$zYHWBLDbKh9{*Y2QrJEv$zyL7bf&-fKVLo)>PbI}=X1Zz6*{Pm6o&WQ zLm_n=!-@PQOwjXfu9>M%|%1I{>YMfWr601~k5%3Kt4ar`mj^!)$oANd>AkZPz3fp{H`n1Y-%j<=z zzAFXCn^?K}MvOy%K4JIf$qAAlNmP+q@z#&Y%`CK>t+5Y_hF7U1vD`0Ir`Gc3zoT}3 zDX&|ZaeT8--eAUKIzWEH^J&%AQ&+l%;+C`6GLl-r%O6oZ7~S*lLot}jhepV?7l(g- z`D*#-b-eLLp?|!n{cXj)IDmIvElZ2Wxx$p02E6oBo!66V1UOFTfd=5LvejV4sNAZ} z-IcG@)*9=cN!BuPPbiv0kwxG|{`&lNnA~zx3CE|^I|5IrK1RC8pUO^m3FRr@O6w;3#)2+ z-7wsHhuQ9njgeM&9glR;YJ`yC2diAYK_uD`Jb2c-w!`(-vLE7b=jxOk`G)jIrN&@nBJ#_N#e{0{MQir zZ)5duq4U#1!puz4Al5_{b%)+$_SnUGM`uGMDnXlwT(b9=vw2P 
zkN&qG$HTPIEmmUv=blILb-nl_SG~OM+-EvIhvAM)vxprypph^{uxBqIF@ClLJ{M?g8mRPM%_vKw@E%mnEdREg% zz+CWn)3)*z!3QS)L$v>9ooJaYBN`SlE%CoSH1t1+g)hgG)c-XW{u-z^-rn;XVf}qG z{G*2d&zgA_3|uaHBe~Uo=4$_wMY2u=T+#JZF#G@JIuQI6co99@Hmdl)S%3WNwni%f zi+ElCdENhJ8SvB>9T=))hPCJY=du5f4^pDQD)@inW-vc1VE_N$P83ChxyuJYsx`L` zpbas=jJiJMUd`A}eg+Vy#PVdm?%pYf*L4)B;rj)gF`aDCo^M?P>zqC|^83 zzKS2s4ecrJP8=8E(p{jc=$WlenE&pGqPG;8)?|Vx8D=mIB|g2a*7IA8LM6(0lpZh! z@nRGM-7Nf0DpF_psiVBc)8{i@$$eT1HyukjNIB6d$NQ6_DOBgUG0SAPy3Xu=c5qT< z38&pNu^mWZVDp{TU>F%hIHdzb)Q{;0zh{cXm>EP+(#D#Na+wag zR7){1Mv#fbzPpnlV@U!n;xC5~6wwGcjL(`@dteBLn%1SbB(=V<)}RA-s#l~aU8HBWN9zpuGS$PsMu?|Y5-N;pM>HU zj(G|(^pXfIB?8|c^5U#OU!=K24GNTi~g`t0plp#!g2(nB^UOsWmgn zzj)*b28d1yZ>lR0p4BSdIub>b_X^Dq=|U2-a1W4zI6Jn;BhdSx$h4TXp5eeOJ`M4| z@XaF1$NEMc4si4K^i@w0FCt2*S$Wa5{)kQ--~qYBZT;&&(8WU0c&G(uJV}Qdzr#t0 zWA=lUw#(n}FWqIr%T`6EluBt z&5+unz3LyyJX@y2?Q6dJ(uNeHxhneGy>T~=+_ElyDQ7Ezd$Cxf#>%YAX8bVQUi&ruM+UeVSz*pH}tB)SYfQsFpvB923 z#{S)D*U}jj^(%rO5|4a+R*1jbnDK&XM1v4h#sqtF{lL zR*_`JB>t5jl-xiS<~UOp_!9!GH6XCw(3gJIK*s zS@ZNXUT;1#vJIO7jf{HRe0PA5b!A>#9y}3n+zhWVhn53kL5K<*0LJ=udH0;zL{Q?N zSW=G_mtGn_yIf2t6*76a_lwcx>W>vlR8wyA?IDe$hN2=3L=|Tlx}UBmIMm05UTD2O z1VTc_FRrC84~3pChh4_At4f>}2^#Tqsm&G5V&a+&eT`3b*oq4!>Dx^bt~}VQ6`_XG zLT~wbskgFMqv~=LQ{Qz@&pe0OPUqYx+0U(fk*@Au4`rRXE&G#EM9vFRpbap9c6M9rv_h{k9*+EcmfK zJ)A+$H%rGlRgksGCAk87Fpl^mfrCfs+;t^jZ%~*iYmxJ(xjZbbdF7Ej* z3@N5=Mizrk!e8n(WHu%`2&ChXX9sRA=CRM6(}P~xe(K~+wD1J{BAi;?1HWffdKjO> z+QIU~{1*Xd$)9Y)+*?;E{I`18e-A%((_U#@PFbcva$Mri%Q%|8E{|)=t!V}AJAXSW z)xN1e3}aSf6X>!nQ7#L2ww>uuX8p#cPg3=3CAnGX*|g|M~{PLag;+OIDKjt}hbvl2^a+16!>TGTJ1? 
zyd!WR>X~&{hU;Oi>CqBB)%X(2RqM~|)uyK-*Nj~)K_N;m2aCml4zwDbS!1#*64$wL zh*V}}A)}#=WXuiEY3N-G;zikl%>_SBdQSy(*K<}kF*S?~gIuF?FL~RFeah(h@`H|0 zok;oF0Z1~cYo~KFfoBUoX`{1E{)1iJmeSpos@`Sf&kJGH z+HSGiXY#OnbQKY~{nqT9GG%*fy!RfX9Wto{1E#(l9i<3S!$ zIL7v&PQE9o_KnHu!0Gu854TAdxd8qhT5E5QsFL%60RSk7-JPa-ac{|shch+}UazvY zUMe6ilUx#2lHD9gHaQ-t_nJjM z$*)hzX=kOSSY;a2IZjj0kTnQiHa?(>JKc~=Ey+yqd7ZD6@g5;Or5N@huPVyS`*3>= z-^8x0V%8#XSbBEeHWa}YMJvPs#KXI+!$fVfz-X)>arSXDVirGxpb&1mhlbMA(v6F| z2uG~RRS&u+Mq$(O!N zSCFx)9VUwuE`w`m*^Fpvw`pUwr=-)G>Y4NKCQCYPzXYk$%A;5N=~G!_xIw58Hp7QB zA_3WNwulow@x)#zs=#;}+WJRG?O;3COd@lJv%BCxgost!{XJscx15aG(ACfRFEt#`iMaC)4W^_)px22)dWT{|LVJ* z#$b5muP*)2c>=@MPzJT}&j}B-UY%0iS=Kh)T}jf>vQ+?UL{@v_&Qw@@`QW06kn0$k zsHg?L^6E~-kL*e(UNiAz@kc*(7u~HO)%(tVp62!{7QDLbKA!gJ49m%WD%;ll?pI&+ z1uiU`M5g(`uNGj+!!9A_%Z6qcLrb9RZt9Lo|1C`L_HhhKLT;QZ9=OXuj7^WDU$ew1 z4iADlg-fgVNemo#ei3qigEF<924~bUq7>$}(+z%l%=e?!O}vKcRJ!dfiyAr$W0|Il7O+MT;%`KIQUSmvbNk^q+qO$4w$@S?Bp7Wpt(DK;Ce9l-{ z9UHczYZ0-@KGf+BBP9%W;XHO*8-$x~A9FRb<7XZ~@$WuuTIp8;>RlH~E}T20I~uwi zuHLR`cs@F#7)H0Ka&~}wU1mCApE2PuBY8S=r*!04I#1q^4r{{LFs80CIj|mSu83JF zr*-%gcgGv45tUpubT=C6>0V&cU7lS)mjo!cni7%%%_8pHk&%$0PqjL0L;In@2>N-- zy4ijnL^@#hlL5l>>O7rnl66n>eSD~D3~U9GcCCV`QLx$Tj1c_Gcc^YpRu9yrNvGjR1e+BmIcQDKTGr~pl8fuZ-? 
zgWG*Z%JtcV;kOIzfz*dm;qqqHaA>Vv8C0d2>A*LB7z(rlV&l~*>0>gm)eSL(*##uQBk;?|LE)oQ8_o#TR_mHt8~aStX#UhJR6Q9Pw^u*&p`DMK+4kGrVmu^P5k z%#E?(2sP)Z&R51HUl)bNmPwGsDNB}ApY0vfgS4L&%q&?7!24~Tmpbd3Qu!yrF`JEs zzc|XszyRO-q6~X?Ngl7fvFWX`9*Gn)$})#uRfE$G@w_8i=e9EGoyU*jZ%!gKvm*F* zGOh)~1zd7JQ1H-+j>|enIE|M;_u3HVBYEuKzDyAIjSpn0-4~*|nTK2Pu(GjekjXvqC%rH1s7OJi;t zA)l8Rf|m|#1d5ONCklDC&MA%Ze*XoKo)!{({xm+zn3r|uW?n#a7sS=9x(^9_#aE&^ z2C!tXSMv>Jkg#FvDPp3|@9-bmS-Uv$49S8et~RLZJcq$8*i$h4IN&%D9Ltq2Mk!Cx zxVYGxtmt}W0@N0PH653SbLELwc$E%;css?zG}%c6V-+|%g1p~QFq<1BIZvv}(<P zM$?gQ*5n=-omkohPi)!lR*i|qlzW!soIcz6lHHPdikXL)PY03V37LH@~TXT(KOdr6;jIkD0nKu`3Aul{^S86hFQYjepsGO z(ljP3iT-Y<&!evvze|~|MmN8;!w(0R^Q((QO|H%XJfuyx;ApgH>Lt{NIg;k7JYZK* zL|RgmnPpQ1qDQfuyWa4iOSr5(scm`R9v!sOZ^MOydi)$iLClgij9}~wnr&Bpqey7Z zJ%=uh2vyz?dMB(Q!G#M)-6)v1i~)Nw_;@I6a72mzUHj9zXQWA7!j@b}gpsH~5Qx^L z%-#(f7moE0p8SPQ_R`J$ko$Ht3Jm+B6|@f81=_I{rg_PkVvWL2P$r4nsKHgBwke;| z&!|Z>MC=mDxLn{R*F?Fgbien@m5o(dkm7%F5No*iZ#=V19{JLc#^q4}Ch17A8$Dkj zJ@Ijh4+V&{dXSBEiF8d5;=7t+UCG{`z_Fv9quhQgNC(zgd36)!S2!hF=qq%H(`OQP z$@$uSRN$5vq!g@Z)J&vIx=PXPbh#gPx<4w`Rj6OF5J;xIf5hjw_)Gf7zH&(k<)tCa z{?!d7Qeg?^apYPf&NI1y(_H7v!-vb)e8$rSH>kK^Wb^`C1O{UPXCR+svmdYeA{Ka3 zIRx__8&p`Y**3@|O1t~kz|I(5?#R^HQr$I}I}aOqQ}W!WJ@x4q2B(akW9s^L)_&|- zA*Yut6*bI68Qy_{c84y0!3q2xgpoRGcD8E-kUoPd0^|JmJ?9JTzd;eT@yC&gHud6e ze0O_oPyOxAiO#IP!w-e#Cw82huE+-Di_I8L27op+V0nl0+!*COyvjnIWoQ2m&6f;a zvIXzJ>d0cTtC&Ktb#1TwV$8lu7|Z{QIbf_-ZVDU(_!5~qUUiMlW*aZ612~wYRs!Ggzx(%&}@-24dv2W|=(LDts0;Z2PuDtM4 z*ARQL5TV2n$)|5usrhzjFRpg58czyUy0LR*8qGQr{_RJo8--#sKzY_-CHC;jTxCYn zd6vVdnQxUQr3!KTgaX{sZ6Dska1o9(`2p$?5{cp{DOQA zcfivtcAZw0I;*)d$jq!|hytXkjRCpalLFioq1U>9FaxP;N;J+&4l3?8%ipxQK znka1Z`k9;5navAFuE6P(*Ams^$K#+)sBx%JLLMD`lABeXaSuna4A|NFmI7knFa7M0 ztK8u5{t`>m`{nS9t8A(_=8+z3$u?*qaI@&w zLl-Dh{;@@4(4tXCA7+O;v3esCzUF?>t320!y21oZHl*^BaxoI*6zAmsAxT)K{qW`; z?@Mym=Gz1J6EPVD2D@9gC7g<8G%2^PTcVuMza_V%)0lcCWy4m`ss(ApC!q)nGh6_ zVqbKH{W&@wT~WSwpYD5sM51(|zOD_?n(A7-!M9Kh!^<^>%i34NBp?Gdgf8e(Ffvak_2 
z=_||bEsLJ7Z3l?=?$aOBx#6xMp4=EK>9z&gvwbxAGqPE|_)d6d$ra=oh#vWu7!dQq zr`du$u@dPnz19enEDR@<7@gpR+4qKL#*SG308PGA@M{<^k%jVh8qm||;DF+e87VG* zHu&4TQ@$|XRKs@cvYYfgw4k)3YxbwS*$*oR&h_f)iBf12F~?VZhoUQUe4H)VMN7)g zp$;Hr0`S6Qb)t0BV-$NT@S6eoawwJX81S}tF_0k%e82IXx&FuF$CKYZ>25egci#Ax z5v-lgA!?LRS)RurymHQYZm_-4PY-#mCWZ)kdb%Ok<90^mh{yhqV2dvq^Nf)u(4H@h zyz3h5_&&|#ax!&DWt>bwaMQgZI*E0DLBx&54p&j%P4UgExAyI;*+0I#C2FXFl+re4cuNwn^J@?<2gW zaEFSuK6W=gufw?1>i@-(991B-CcO zZ)Jlw9-1nD{|gQPGY;v`kp9bm^9g2Dt^jTb+x?H9bO9me5Sh*BZ(NnC`v>g`{+gYW z#mS6@_zCX{dNGy{4C=cuGi_}BWXV7uwXiID;(n0yVJ*&XJ4*d<1KIx&mGu+Q05(PM z^m~$X!7$9E13nn&X)1q@ZWDmE!`-#J*<31T#4MhOmTSTA4I(l%a%8eTPe`d`M8$ar zCQ4&xJnrPl9}WZtqV089CPHz`d$@>*JIOCxcmXgR5?wdoIH)BwD@jwCpMW^ zy=zLf$y7d`zwN7)(uueV zJKs7Z-M#XYq3X;*t%eNq6w`wuWa^{_M|UTNHSfMprifQ3DK_&jHoaax)z=h^4BJ}I zW~hV*x$8r>+}ZQJUg=B)Ir78f)*h9SG;KpJYsti^dwO?815F3MEIOW|dR~7Swcpln zAE!I?9KMGKOu-47TMy_7jVz6UgUh&hs`c(kNTX{@>*IrsBwP@%XI3_PXaBYu%PG%C zP8WqP1lp@V4K&F5W_fYmwqI}UI&*uEVz-Y(=ZZbB6sr8Z#Vp4PxNb`gBl*p{I!$ha zdIv72^^|m->}5A*ww#>@_NQ zJMr~WThoOgc!cSFa^arCk>7<1LzvDCPu%urGJW#P$7`V0bJEq4v}_{3z}8$Qo*yh_ z!>e=&yUS1_;^@4`l%5y0zY{)vZuL*>=U?iLK; zy`~cd-O&a+YZPyrVIlga;fz?2Qz<&397tpu|AWx=PU4c&cb0nfV`|a;^2O;>$Bluq z0J6B$hlyuUVKoVA}{<1jgHy$+6+~dgTnTpR|fq} zPInDG^y$f4d=M-DO#cvC1M-j7{5PCG%SkyVZ%}#SL@Ud3TMif(DkUwU(3Aabu}?W( z*^Hx)-~#$yrBX{P9>JB&RJ(*cLTjR*>3^BG{S_(!5)qSZt}m{UvR~WA*5X1i5aKzmF-m~ z+L8u7N}Wm7EIc|^64S|CEqW_*!&NzOvnE!xnwM;rWy@~gA7c3sdhltb6}~L~DKHCm zvLP|k#Lpu7jFI_Alxsu2#Gj)%$N_szHrO)QJH&I51N%u)|GfgXTI8G;hLi}7*IDv^ zb2v(Q1jame;M*H>uhMYmyzn*;zA&|QuTOw_z8)oSJ)7VUyTC&AuOaxhSXVYJ5b>OF6%#6+m$hDlhRGT_dJE zPS<-kNS%Snn$eR;z@DSi>Cq+Ew2Mg|)tGST zVnDNXF+Y|Dz)YtS`wpHkUK?edu-ex>%IuzXR2R@k<>1@T+?RLEqEOOXV0N&;#f61% zPDMLR0CMcNC5Jw6v7B%{@v#i?fIQ*;N|qiqd_l49mS;q#`wbWaVbnAk>+ET`_KNfV zANJlVpvtX#<5dI^X$(N5OIlJo1f{#XLAtvHloXIybSd54E!{|WH!QmIO!n4q$M^s4 z&c(SncYw>a-ZkfZ=R3z3&-gv@FXp1=+#BRPru)ZN9hf3EN6hWEG3OwaCk0x;I6E!{ zp_}Aq=D@0)=L{Kt)m+1K5^HRCf!D6OV!;LRlp5(8`!ao-Lfc1p;kT$ojBTp6A;zfO;G 
z<}q5CDtLGFh^P))UlZ4vAda(MYDA?lAo!D~XFIi;m-_bI+a$?M_ef6X=;CWC!RdtSv@JW$O255Pp5ri@Va-WXJ5IOBsF{Cr3 z-|Z3KJ-qimc~wc4{bMrFt*JE%lQ=fKNuK!?L)<24#Y1dIuKcmVmmKk;NWCxVOAB_w zpFu1ULo1v|L|j?`U4Fo4VkLMrRcgz2(X2Sj<;h zy_P@T`r5TW*08cFGBc(4ab(EN%YYd7fVI4hjC6@3R6iRBW{YKI{e$&g;meXACCP#Y zt`|{)cEQv(5)0L*Kzc)CHZ{H5nPJoJo5p*S=`ZiKf(EkN?SPZwb>?2@Y;Wb-@ATPe zPci5+X1(C$VFIkVl=+Bayz+u3qV1#02ak**TH$vhlr$hKbTu`t(A-QMz3^S2pjjhZ%?*I-eTQ+ctfOXy7qtt%i5=54eS&82*pKEUo7q# z`T5-;%6smhPy8+3j&FUD?pnizMMu2I)qboQQ#ngCG<0x{I4?=tZ`sq^mr2(V%G=Y1 zXr_kTFD}MwsfK(jmZI}ywI_6jkh7(1cKn22lOx%OJwr4}fk7W>cka|L2=2WWgkIO# zu0TISOHofn z@Hwt%zNqRvzMCaY?)W;vEQBq_D|s9e$lbnfuk9-}elT8m+}>mqAS8zD`2~h( zD;?ZC0LO>7>0hu%oTk^$tVp@-;3wbYUsFjX82nMk@Neb%q^z*-{njD+7 zTuDpG;&zqIPyQfX_g9Dpb7zrFUuE}-JP0PgH^gy53J1x9@fDtMqz${Fcm&SxA_842 zlFi|1fU=;A_+<9n5{VdghTHr`&E5NByDFvq zee3i~?qyOMtc#}sUy(qC^=)jM!Pk%Eo?GHlx=76Z=8azciD)0_f{E0`S-j+h$y9 zBVxJp<^(yKai>2HlD_%&4Io>h;ALU@vE+9J_)nYnlH z(uc_aN8RZ4Z`Z_moa894SBUfx7EtjS(Rou#1EE0V>Ic-4L0q1HlbimjhMbQj{D|$Q zIy0x{@%la@+5Ja$)}MtwN|%cvV83w|B4PS$2LAIN0WV+ggU2Pm4x@k5QHF7Q+HMSP zVh4oZ#Oxt~eYmkd8lHwIZT1g zVj9664*n!Ml#X2BSt4?g@p0<_I3#vLsE7ciOWv(sV~~d;@%}$Qh<6-}10qh>o?@x% z1VaqHTeV>;g-rTaFE~V@8<@|j20^i5jEWNd)?>!2(^Z-sr*kxe8H(R@-Z|y!wwoNw z5iiz)^a>@Y^YB%G$ZPxQ@DE4%zN7}NqPmUrY3*FL^lY;!`wrmvMhcWkz@9TJ8W}w@ zAk}`cnp(M7E+UZ(NJ9c%%b5Plj)qqUWLP4{6$u*mPoZ<%k3$dkJCY^u#X*RCYaz`k`+K^*xQNb%I*3BtAzOdW|ZSevqcg z-0*&inWd6f{ghfxNMmDej(z8%VN4jz9dSFA2LPlif;&>TwJc^B$-Yb_Gr=^O;hyWiKT0eFBX5=^P z)}A-2br+I%KtozDB>`Z-=kQeX;94gq;q&nMwCiE-=m{bwTKfn8GL33Eiq0sWhB9+W zV0QO@G*6X$YcwB{3zT#KNDgRoq}%UY2LWSQ{xTQx;PO;7k<%%4o2Ej@69&$$i%|DC zDM#>sD1sdQY0rpua zUyTiRtkmf+J&f7pdBY>i1dm&{G}GzIcQ+H2!hMNcR8De9K&Ja8cq|h1t|hrO++1)> z)H(&G0)-2^z3FZVE_SBjk55_Ew+i`c!thb2l#Uli*yn(KQ@pT?d@$$XLdx%RB$KOz zo+X#%C6~LtU7&H<9_f42pw;xI7_3q@%e~~~jzo3yn!GS3qd|glb=;}jbHi|IjjNQa z93RdkRx6pATe#E@p+91!+rD7G)VJ!dzr7Kt0xTrpo%hb7*{S~eq(-627tff~&ws7_ zL3WbI@km9n`tg=q<(uI$pg+6Qdhj2t^{>(Hl$ftM0VEpNaH;Kvj7%|XBF@Uo{-94E 
z455=1@FNmxnDK@^cAaDuvns+bSaP>)Bz|))e&hTJ?sI@G%KnA$>zPWcC%(z$;s^q0 z!vwTkho+KSU)q9k*N8LS&K4Yxt?QMdsxI?uX!y+PFw&fP=0w*31z3YShD&v_%uN6V zpWAfAw*^`;M+=}fL=(kuCw`aKvz#%&>2&sVb@2`8OZ}DwKN46|mjId(AwMk|%y12x zd2n*e2@X?2cut-UyQ>$efktf|s3FGbj6>4V&f#~UGM?1iSlJlLSou6#m&a;)67*)) zF$Dv+?SoZsB9HO%E1-DPWZ!})KJ36u^4Qr!W779U!X|QHvCvj12wVaC!iXIXxu)xW z+S)l_eK{gt;B5;@dL^3?90k0~*;v4EzbUpJNm9u? zB+mr8V=>V1D7qU($p281&kT4mAK3P!fh^=I)IZ-=(jF|rdvxF%_}IDj;hc=wB2Khu;7t-=%Zo`g4=8taF- zkoNyF)2T#w?Gqa(grVw79y>!z2~d-hV8oXPNaU4dYR#_s z4%)y4)z59P+EJdPvEUmIfx@YLzH3hD2Y27)UYl@Q#DYe2;#uo$%zVi0&D0Wcx_q`$ z(@*n9e07&cLmKAa@J^RP3<1DrrF-ss-JH}|TTon@W&>Z5!sYDclb1Rw5-aldF2}}R zq(~)&^$c2m)bd2hML_;8oTtKk1Rsu$#pq;bJoePD-Cnia^fSL$v-?PnJmkk`P_iPN ztt*&Z(l<{WZ!m^*d6^iS?uX$+1hwCYHi~81OT;scSa4)W-nIyed5^1>e$bxj5szc( zrYb<97hnBT)AjrD4K{e!M+5x5@WO*#XU0u1g5HPIPf6dhGAvaU+fA`h$Q6D)s5Z{` zm3w={aMD{bCo*O}mG<=o%4)xP9h9X#y+NTMlKpsdqn5}0cBTlK3Ww#&3>i`Vyd8h^ zzo22@a|1mnfi(H$t$taLk9DbIAF~ zQ6*a;k5VuM4{|!^BI)vtPsntvJkPbjr`OqO9$T_(wd)BDASE)WI_*zzou!K<5*1KN zmTfhm;BlrJer>6G8-aDONfn^{MGCm>Z~~T9DIP6{LNCk2>8p?>GMAOv~(+pJcp!rxavka zl>W{smT&w8gWks%?^PQo^+7j2gYEj3v)h^9n>z#g3oO41R#&A6rLi1EO_NB;a6@^t zghsX#=lTK&*G2FB3%cJdfIM{P4igYN+09nlc~I?JB-aP;S?6dr1iuJVouu@*Izxc( z2OWr!5($%m6ZqGu{fKm1giWl5YD1urapV+uyH`%JZH2ROLtE@TGZKmnow>6vpXDRE zCy$pH794FJhh5=ZtI@Ai{#P3h$9?C_2_pf=3&m$+Nc=caRQ-i;hYb;)1xJcr&02yv z76sT#$gAUMA&~0h&8{_@wXUTu6U1hEu=kM)nsGpUl?b}y*-GScrJ5)KKnkL80W8mp z2FZ#JLNcLyB&rufGU0hX(FzrVJFuL?WD>E|A$HrZ!qdedzA8M+uK@Mg!x@*ptY=SF zkF&tXO(%&#zgq;kb?172iBz*b8cTYPqG=$hH=15H&_KP$F3O_*TB6lI;KdK01HU&8 zb>q7Y?T+Z>fGxo<_Q3mcw?Xu4$TzP&&XOs*27%U1ehuFfR8^hlqlAyRF1KX|h5SFD zadk!bqgxbm)w8&qtVKp2>z&dbG(I?G3l16A0S4aXIt_J)Jm=;h z+8ueSr1OGhSsKrAH(R^V6A90?*(YT9-F(p-=c6qYdSn~|5x!jSCRt<{HMQ-$pSpQ7 zZ9#6bE)Z;bBpiZ7Wc-_K`G-Xt+XibB`RX{#u$T0ppTBC`xH!Vy%|*4QC+6@o1cek` zjKdc>(1}(e{wy@*!pRt`sK{lvPV~6H26t?Q>=~VTPxPzR^p{cpILU*!XdYlzdc{e+ z#sJeaed&`O^x~72(!!c1tEQ04@g}JQwpTBE{pLQElSCD3$@P=3+lgngC z7$wZ0@?7bqK@nOAw#I=w+pF)NxKw2_gmv@i)HkW}f0%p#nA*YYkvi)kaeB9wNeVoF 
ziw5=7(>6T>(CB}Zi@^UJA388P31|&?RJgD1OJ-Pn-x9=?wBOxUe%bC0C{T%0U#oOl zEk?@$dh&yGS63nFPE|$~%2h}%K$=dK!$y62ljt7~TG=10tSI!^2qLYtN=c8W_x8CM zE&O=3kG(dvN@yWj|Kl@aXA8n^m#PKI7!k!LLhIs`#HroJEYzyMd8wLnxEca7sq9zOs1vL=Y%Iql=YTC62(3%4yjkm!mRKa8IvGQHGHlJTBGh z095+b`9_Am^_&|`X>vHNI=d|Oc6T_rr1Gp0X!(&R8kT5iFKnln&NFDGQF`4lS8UJ> z2$-c6SfdNdQEg-|V;KyT>ztpNEW(BxM&4#>N4EjfLQ2(&3t%v0(D~iBzVYb+=P?Y}Sq@7m!3sn#uDi{DEfEi)1o_oqsD5SvvuFa%EfyZDD2% zCu5y*dW#BeL)WdLm6yLwpx3qE3(^(-S6H+a03uQn+p6Q&Z8)G2RJRQ&X9XaiAe8dg zE*8pgOVL8;xcn5sHCr?0w9;=;{h8iAS5=y{aGN1ZBhRkLbHvZPgwRGea8gr=w2=5d)%Ng`VgaIzg)-^|swuz}8 z7Wzc@>gH0!XbPC2$pbQ{C^A=Y-TFc^{dM|>pG5Z^Izowk=b*lF)AYK4BWxIUKg#d3 zk1}64DdOz?^MQqE|F`L4*GhW`ZQyZB11#U<_8;h;^heQIysl}>61$qhwowPhF)s^| zJ@Z8yajQcAndF$S2H6V`9ddxoVLM@Qltt-dj7}`yp!aXdT80p})pd4CcFg{0xClDM*Gg`Xg7U+{+3H&aT>r3^yb* zf^k#%eSLA?q6&00d29ZGip{X$#`Lpl8!eb4I3p;$wS4v@M32dp+^ z0mPglK{3LlJtzGRhX%k7k51hePG;KwTt5$EH(O*22p81Jl6w zCG!DumO@^D+nCZ2xx6bUT{md?hqxJ*kPS-o3SocI?~A98*3l{O0H@#kr%l^YH$B{{ zn*e?*?1O}T^~`QLL*&`SxrAK%tpvwBV% z;im^ajJ2OE=nlr^u${WT#$2yHoAN#X*HdT=6-0c(M=H9jVqy}nt@VaYZENhIVO7Rq zUXgj-%T{j2cwB^sPo*}_)?@M|wyZQNc4Mf=!0~8Y+9_2Wrx*FH##DL8BZK`^ILUk8 z1$$#Fwj|fu@gbz5ks`N>8Y9oBuP#*a`Fb^M>Rp}W8hTU6d_E*UIG2oO9SP9=?7R)8 zE{JemT_#*O+T=q4YBPQvH84$3IBD|sCH1(e9d%~KP?qCl)Qx*%K`-3DQf|h|$?%(?>rIWr9q7=J1RDhS%<1501yW*~pg+;W)Y zn|h-TVe+w$Jlw+mMZb6-OIkFKDg&NChC?s1GrzkBrC!@_mrEgwGEu*3B+QOv9qH zevUWX?nJ^N)`#-kDCrmHl9G`&zLwSn_acjMT`h&AxycN-+;Eqi|4O&6PtxS!LeuB@ z^FmFJOLhD0vn00?;yRQ>`#=w=rtJ{6g9mv81nw(c6`Mp@Cl^x|^!xXqmwab2$DJ7` z)PmI_kWBWkYz{lF?JSpFU}IT1>~Dxb#$lnfqBn*?xwH@WQF^G|X9l*rVgrN(j#E+a z+S(ewGL+|j9ESL-2#L%l0$dgZI{9kYjNkmqQbgv*=^8kd{D-;HGuPI9R!8hN@SG)D*3hJPfXs2cg zaC$eqw3FM|?YBuF=0WI8V%$*@FzPkjG<3CR0f~N%qC#wGJ_9@ciT5#PP1?F2@o(ap z&Pezm;eI>W-F;+g5v-LY@LjE{m`oKw;yZQo8kn7PnK_@vpb6Yz=3=)SDL;q;2nb{I zR^bBi@fS#WFFQh0!5U*-S`vn5?2JVLXiGYiZ2ldbLk}P=~QRnyXBOJ+hr_j%diC z2}rfo_HL=I>Ul4rrJ7NE-o8#6Q4=LRii`1q25#P+iNrNMvCN`i|01GNpdOnd)Bx#YNxa>`cj;pBdI_!p08#o_ 
zgCCk3ppGEM`PzJ1iK;>Ys-o}$K}_P~x5)QrR4S={%YBe3X|iaDUcNGGIH^OTW)yj% zlvRFlhakRq8Rwu_9q{3k`5{2QqCC=}_qn!(O^wcex6Yw+y`p_%sCA$Z9=}o(5EL`y zXhI=5HxU-~$|QSY-poeR1F;bVE+?jwIU8%5ktX!p=d*5U=Lrs(mG)`+w>J;EOrVl6 zrz6M12*;qiXW9Oj)Mrx*`noLp-^pWg!?v5K1O6XPgS*5 z!h{&O&_*@fsATNKp&U472+HGAQ?B(VDSq!~U*S3}E4u|;U6CT4ciZx?Vsik%cNb4I z>LO~m;&OV?~q%A*?0=N&@^{~-uQ@QwAXC06Kv1ycRzlHT9kiB z()YKTf}Bq67E95yj25qOkr~l|yPq0uS&NQxzeK;=#qPyeUyJJ2Y+i}!RJFRob(M-! zZ*OSBXnwfw6eoex;p$y;Y}e+9$?-CVv*5Ajl;9+*&xFpPpCG;&RXCX&$&|x(1WJhg zPduMDb82G#Ch*IcBj!U7hkt%Q$4#d+i9v(9E-(1LgqlvKeglW?lkq3jbJNZya4Rtm z67Cm9F|KU4R5{{*vD<&KB0gez;zW@9Izz>k5N-W{)YONZFsS%U>jPROfcT*;>;6$T zB{9|2&2ro7dr&RVw~ehsfvp_ZpU9QL`4Od4{LM@%`8~hfQBn<)feIALqP3RJC)r8r zVQM0^-dHhm)F!L8ifLI`G7}OA3WcMn#X)hg|+0}b4CM`O+R^29;}(gL384J5mF z5z~{oW0MiCAGFA-P#=u9b{UUsayxBy(vDJB7(8exJe^I_Qot4UGvfC}8qg3K|G1D6 z!ZkW7{jfj!8^5N*Bd%bWpHzDoE?ar%l$CB`VvGgWJGCAomi-xt&V3k9XQql~Gfqnw zz?r^oBh*(_w8C6E9;%PNn*(k@of14rI|#^(Up(aiXtL)og9?ovyRy3HMgT5gZr2^w z-k;q017AO}~8Nt0e#P${K} zJWJJSJxKS{SiG2Xcae9SsW6|~@k8{1L0g&k9|3Gy6v6>%;g1ktBSsF(Ki}Q z9o0|*2d_wA7hw2|LOwe+g4(I1Ecjazuy6*HHOx}qq%uNwPm_r}6GKv3u5^&$)ku1! 
zK32m#8hMDk*%?8B zzh&@!uJSTi@e=BEvdb8r#D?I1oZhSUkH{D}st0G|AM8uaAMO)pCh$1C57e{2-)lIx zelhUyP{NO)O>A=8@Bk_*+m>iJ7&nrcB(=S&^Eq$vNUB5%HcBWP&$uZ8SCo_)MzNcp zMv3-OaD&6HJ68*810JY#DKWfiV!-0CSHO zTduR{qgG}8SRh#oS)&0YRzgdShSpimF0DP+7G~=%ZNJdFTCMaYh!*u4xcRr*bH0SX zOp=0%=L?Jxy?gw*g@GIT>7G%&DHK{7s77YiK+DRbhFOAqNB^0LD50C-d8m!UHN36p zZS=P&*V81Z@p$#ydp?>LJV5Ud(4tOEh9wf05*-NFUm-ywxlim|6*%<|O^1|}&CnYA z-zuk#WfJqwPljFFx7D^3qt^s~6@Z1mHvW8K7_OBwq4)O>Onplr+QuZDmWBRPVlG(# z{UO%QnkL`JaMolDK*2;e(i2Ox9;u-q~B*Fg&`1!AjrI0bG zG?JE4wVnK9sr;?ZO+pFIW&i(M{4pZ`Z;b`vqGTdb)$Gy%gz9e)#lOia{+!gYJLDFj zMk^DA>^c{Kj=fw=r2b9){r73j5_CI*W&?S#a55388pnMZKbKUlNHt2>KT-S3udAmaRm8&;>ZH zD)c4Fet+TX(dJ3wK=@ZF^B?z%;2n~!K)DSmi_|32)_Hfyczx}gt|4&3rLxKms`(r<$ZYzB?D4j}Ncd7xhSk)ghwG^rSlDxds zH@pIp{rmy~yoh9AVV@ce#`(#mAqYjyx~#=9cIzD;99*c5>-W#hO_g@`EGG#cSkESK zWXX_3x%oy>%)`lYpqnkV%pWhm(Z@^S+i6l@j)U&*AR{5PCP22wY_r zv2n7(8q8An1iiY3M%}|jJEL&SYh=Ouyv$W2k*NfG~UmL zC4e24VV_0>Ft6$$P0Lvz4fsPn^nYBd<FNA!DuE_w7nb!EpDPc4?tiP3{+v4A+B?Xa zMgoe0&;MF?^>^!Tt{-jd_eT5gYep-$lk+HMctig0CH2R305I$+W6Q<=T0JdPcZ+po zLreev&~5xt@msvBx9?vg6}kWW^MB9p{$2Le(T-gS`yYew*VgQLCIQy=|0jkcto`(H z@!!6QB{9Z$nMuVL+&9vrc~b<)IHTdud*ej1({u+mQ z383JAz+gGA6%rJ}Y26S(sXVQVi|P<4JknVM1qt=*JRn&%25oXOtqXau-}S)N+Y8LOfiT7(hIrdQ)7)A_nC<&mQgOS zdLOKo7cJ?a_-hMPdje)IAHm@H??>cVQe$Ydpx$aO8{V@!PmlJ4W2NzGvznJY?msBm zU8zZ(ZjUiilH|r zc#p_+yT~_H=d*PfyB(!+k#!mHlF}H>j5fB9d%b|UF;Ji2_H6Ah2;#gC$cRIISc5qK z20=W#TMZ_fG9O9M``hot@d#Wm_(I4c02_UalsurZgFzCe|_V)6dqsWf**= z*L@ET@t-23_Gdtzoyf?%ejVz#o>MHGp*nbskpknSq2sC&4YR8A z)i^PkkgO7{v@bO?#?%uP-VYmHM_9X_G6FZg3BMrZH156%UNuFb+Mo*L*Y5i}# z`HGh**Q?!8iXn+A>9)clweg4uM< zSm&~GR`eu{MpXgm27QO~WMu=MQRzSoldK|r&2C*zD5^HnhfnkvmMSKU8~u<+t4ob6 zLdzw2%xO5L-a)z`!{-G=9tFx7o3(QE(*(j{BtMawniSn~k=TBVHT}PWEmFX7(5d6k z^^6eL=!f6dLGE3@$LoHvD>LnjD)jVDrO-J@m)(}#5DU!smD)-*f4QA@*4 z`gw@6WYUg0+*?pnxFP`?pMppHOu1n`4*6X`3v7;{gJ8wIQvf~+l}>5jTxQ> zNCIs6jz-D)uc0aKO!DRF22#FY33muyUpjvI`eJjRRhh5R^09FyV&5?;aov`eh^N`E z!$nv7g9{M=pwDxT#Wuvo+SU%j5WnzX>LB?ki}Y#_>Oj2w4PLc7kdBk;4hCGVdk-#S 
z#=4uPPdFibB`@C0)>q(WrcNjYH!cA=vvHpD{J%zb|5&%*p56hWUq+b&p#; zNfoSkqkndql4};-nZOZ6n#uQx(QJgocz9g`m%oo%*BQvdk9iE1Wsy|&8`jv*YwLYb zE;R;r=3FX#6%%HtM^^5Zx8MF2!;luiKko4#z`byr$B7Rbj`xTqGAB{a0@C2Kv0;2l?zfhIdTlq2cjL?7lRdHOKovCJr)@aETh`KF4PS;)TzVxp}_4q?$v*#WM|cUKU-N zc?w-RcW?RNSrIl=JR94(rAC1K5M{hjJB6{T{b1Or!HK$t2I=Amk}Li9e(b}FQRVxApnYeo_s@{^K;Iz*P^Vo;wq`EfLi8)(Y=e+Tra!%3 zw&EQbf?oV!sqn;keQ>w-J6F1I-kEIUXYp|&=`DH0lH@2BAVhqb`nAgw{6y1=>KCxd zc1r$rDpu9R~G9P2EsU z9FwV(WC6ST)u-B-Ew^VGlnT)w*AEB0^RDdSywS!^Hpqd5HEeQ;>|H>#dFC~btK!-v zc#O%{W+_S1!Perx8C4Z39_u#amf=R@eCUqxq^39V7XErxT756OlaG`&Sk5bM$OS67 zLikz9>L(|jpMW>_^`Hha&3ELN17MD*^4>jvq@09`w-}l}2an%9U{M>=l$co>@ZfY% z6j#pSha9_objc+??Aw+gMynFEp&SWlEZ$njGCbkxP%r&)i<7yNgN;#cHRpGejWQ;` z0(|$q{NSZ>kK6k+0swOGBjxcro@$PQU7h(-x(D2TNBpRGP;J0TaJ1370Zu@raet06 zWGc;W%Z0>dRgCgx)}1d(q=m*tLvuYifZ->zOGUs0XxQw=GfEdrAQX>dis+(jQ&z*t zuQ{FFqWFyX?5p}Mbad89!U|1Y`{&Jv|C|qqo(_*c{vEST%EYnT$P z3TOi#J&HutzCVK=1eL$v?@bSDb!*qf6q5RfNtWqHc!g7Q3nB0KMKHKsDrK7%Kr&m~ zUVrkrp9d%TQ?Sm7ycGjJMUryKn$yTunIeaA5faWk_UN~3Mh2Tz(;}C5y|PVfgqiP# z40>a?AnNUyNfj+$fFiW(i5XtxGb)9w`}jAdgE|qt>NWN`teR|jc6~8Bl(bkdyUhXF zj`moVFgoEg_B$8-7{i72f+256hmyZ&Y;0fh`gxE0Nrtx}+fFkOCr%EvwsL~A8x7EJ ziwDMu?^KVaaI7uP-F^kB{4740!45CDqVI)S{Uy*ux&4dS^>=f)td7KEcNMre z*~o&UUOV|fyA=H;+TfryGwQU|bmy6e`ELCKnM|!0%#+*Z;lr_SbL}U&PMJ`ZrChd) z;A!Z4mn#I-Q%+1CZb|c$lGiM-5*SQOVLl}CGs#gqXAu=+)c>a5%z>g|-DZhY*{;`# z!7=`6qV4y|orDD@QF6b<{)kiK_r|IPtgAL_7E&Ph7&=8c1DB=HczTW2eT;SkHm1!Z zQE}Pz4C>OI4%-}S9nJhwo`vr5De~1c5hMTjGBl7tpI^gNJpz{~FJZH^Wl!7d{*21e z1>;K4m! 
zK2`BJ%06AyCOu(;D%jO0M81)vUCaL1ZN5J+-RD15#lH%N_7>2z_>b@7DIf%5JtEEx&Oft(o}Yy8?PtF(PG>}sWeQ#e!H%{exbaQz zxfqhI$2#Uw%W#v+JMh+|NkD1RCo0-kYD;L%bzXEyCa^;knd1|sPGBWMUe(bEW4MGR zul|IT#J|YBm&6jnL!w%49$utN{upL#%4~lz&5AaYRCIN=4V+hv3IY;&9By2Dj;7f{ z%=XM}ODwB%tL=PkYwD+4REC@KX!n?cx=kqjLs;e&1e*tfFvorcdb~P!VL5$*86MRi zkruBP*}$&1XTHvhMfoP(Y(L;VWxM{!>Y*;`YBG9%rx7amfXH1dn;Nw2i}jNCyDsvr zyovy>N0()LNC&OG%295Yr!{0;jssI1uVdAv2xZ*&N2EfE(N%MWwEJx|3VGd~mX0MT z@7;1Mmpb}G@XXM?a^hZl3y@1V+takNBe|moR5`_Idt^l6kLq`P#e2e~^f9sO0+ooQ z7AJuyn@b^)%PsTJe60mKO*rRv$=T1GFYcl9qIxY|GN~^`#IFP^0GI0pj>FITBr1ME z<&&eo&N2dEnsnHwI2Hb!HdxU=45y5bHSvzyg}kmWP7cuS*I#U^hA=hJCcC5z5kxRs ztOC2K7_xe5L2t>vc(z&;!?FNPI6CT@W7nDxLB(-rJCm)Z7u!Ze3H{$-hNqCd`sYkW zomwBO7L(ZX!h~~!5~cymA_Z(FcoI7;KiKb~$c%wC$5Y2Ty62@*yxb|qU=l!Z>cDLF zfkwBDCNe<~rXA*?ej@>{gfxKq_=+ihUdmN2VYfzqM%7zG%V^MZAcEN!V5)3!JZbjx zJE<}3Y$V=R2rdt8_XzJ3`|ddM@+sS5y;;o}Oxg=|JlA<{lPGX*?&iUHqukxplWcN2 zQ9|c5EedGp)aFQx24CEJ8nre%n0`G{(B>XhUUsxss1$Sm`pef2gvwSeq$(ed9_*z^b{|4DL*WoTD##I>xQ%4` zX?^#Q4oA$aHU?J?6xJ$M5*&D>noe)ax&j_1#X889956Bh4ZVPiQjC6tKLIo|>6~B{ zGV_)V@6!F`95ixIVIE(7C7gTwUYFUY@q#D#x|4d7q8gDMjyD9Bkd{Cpa>!+WYS;a1 z(Gpp}O!H%}(GXp7$45c09v{@Zt z_-*kFRwA+*oLcSpO&N(<6!NU&gpI{{u$ji2`K=POr%;JKO3MWq!r5%dDAcn*2|mZN zd+Hr94xO^_D5TpwueLK$*|J_aGGLWRK(wLRo+y#LIKi2JrulQ$Jh#V`@k6(!2jg`4 zd)p~%d%AY|N;~1>@leFyLo4*i@wNvdgc9%j+w%dzpH~4B6|+`|INA}YY)TymDt15m zp&+E7)X$v~Q=!(1@m!2E9t_KdhyrPV-cnmHgA4+a0g3bwZvLTafzstC#&DXr3PJgH z2@9dlu+}tt=nI*mw`>W}^xj($Urf!ifM_<8;45~gcr93P9B&dz>qCD|D8;?Me4ARd zsZb+};1=$YU;wc&*vXV0>De?YY+ou0*S*_5TAYiPw!Ea-7Cs*}chGD6Sh~*Jg@EJ; zt977h(M^IKK5hj zj0vnp#ildWcRe^>TDB^s$QcVSU*sqi#Vf{Sj_I0BB7A!DNGjbe&W$L6`%H^!V=}Ko zkfp!OLpag-fZj7X=ssS0fAmHX?xH;K70a_Te}K*V(fz2X9ENU4Xp{5mQo7iDhDRn} z&8Vqt^+-&B^aV@pP&T6B??-phA6;jv_d=5X$l(z1q47pl0epDpsM%EBMZR*42I9TZ zfJcTq=0D}q#p5;>R(zt46D=Cn7}r*NV?{u`jp)$|X=iTEp>i96 zo4KE$4kONuR*yDEJvsW#jYGN0xQrk%c4&Guh6$}7&#AHuM2zG~j&j^D_Oja9smI@y zTYe@SGN@%xq>_sAjY!r<@@718ugUS{H7TE{EB=ue*xAi(HlCdLT)*MUINGfY3HPL~ 
zz1U5AfE?ers5iH{Od&@BS@F(l>a(gvOYPw^Z}hiITr_bp*_OZ2FaC-5YFx=K$awGl zxnvPO^N!^gT;@><9tbhNlr5vO(FY<6|YiD;4%Tq<8o_EsZs8xj>xiv(UZO1nqGwm(8_l=;_@Z{BU z;X9q(Bp>i{P|H@QDo#q!aqm{Rc?nVpnzB@k+n;>dHJ%i5shk*_SVL|Gr(!`!S;i9Q zuVf?%8OTIp3{9*S{aJJ)`2gcsb+g$mS*t_8kescULzLvf!u$9c<-FAw3fT*f`w}~k zo+h!$zK2M9I6+4wxZgc~M1=P0B0HqS z!U5BND|@G;K|G11p!Uk^z}EB?=*5Wu@wpH;-hd+67yK-pe5c1=8gP}lA74ZJ2khA! zj*!Q(FP391gz4!w z{+XZm6p(BjV{EUVHJi;XkxV!=?{=OUF~nTmH}pu{urqeMaji;mHV?+*B6ArgfTfU4 z9Z*MkP}kzC%IbAZe0Jp@P7dcef}Vp!-@Vucd>&q1p5Whw9$Q)`%5pKn;m$WVzWHVp z5w9Ph5@|lC;ReFm@as#|E>CZGQnzYb!V;8KM^{{Lo4Pt$3fc5w_4_4yeYv%Road~k zfr0v^`*;2Y^=_9d-!I%F)t$a;-}9E5TDWgV+aJ%BQaRnmw1)p$dND19*bw%*WLt)nrHLAZ&DrHzhqQ9^~LDRHTT3FOlKPmR+?SfO`2AW zaF5N5R3?_u>`8_x$n^cEXxM@YgT?mqOCna3otK|ae;I>o=vxOqkB0@hykOAQ7b8I? z?HWNYedqmdTZf9(E}teId}k3FiEP0YcOIC3=c}d$795 z$Ek0+m31>EflSiGYm?Bmq>ORQV^@i&P?kCjxGJK-EkqsTRC=TY`Og_CsxNsB*=c_j zNqKeds-i^|_wacBsxgo-z$lyd<_f~JHa>52K5|t=wl3lgvQ~aC5zk6tGg50a0O$BF zl2ZA{_K)B3%~xKcJHKl5Bv-t@X$MH&!>lWui%pU1&yy#)yu8_FJA}j69DRozjV7kX zt9fbjhh5g);4}vD&riaG-p=uTp}+F=A7L?Gc?#m0(?F9fFy)~1<3HA=n^P5fH~G>G ziGIaV1fDoaKt*7FWz$3a_iiLh!hzL&-&AdHX9n{t66Gpui0MSN_yF?Ll$7TbYDxjm zX>tYo%qCMk1#|~sqbSv)c)j2&NlFFQvRPW0B_-?Lh$xKe!9O7Sig&P;8zx~ zU7NmU75yI#{Jb!pckS^4e!o**f4|~Ce{KZ!VX?>$4gacq{OdIYXrO55`N)dnueJ1_ zmH+1tcP;oOP4m?M^98VXkr*1G`Tvr75ZuQBpTU9^|DRgazeb9{lNV$x;fPwG|2+i1 z{-OmYwD+NR$^U!-+MSOT@2ARy-_@;ueb0}urrYE-R6GKF!gPYO6)ajt*jpC8T|DmK4ZM&wvvx1D$G{uB)68Gvst3C7vm$Ad> z`JJ8eXO=<@GT;tPW)k5&nAL(`a*@%*y@>=|DgjrdF?Mg9b7j1!(;E}FMDx;vSD7;( z3?thAthQJ-V~iKu33jyGFLy|rB#FmWbT#n{>aF$7_=`w!SRF+4M6+qyd>AYY1^@u@ z=TCY}=Zd;j9>%rMW}4kN#BI+w@Y;M!Hee2Nq-WF{7ZMe#IUel^dtbKlV#FLV+q^*{ zfyS^uA$*Do0;zSfc7lcv?4k-n&^|`>ZE1UsU*rrWbt`lRtGlG~)=z_Y1G^yA1@LUz z?Lf?DLYDc?no|QF4W0_@piVP=Wd+S_)+FiI&Gbb5J3|Na_L#^O{-TIZ>uz03!6~I95XUd*FmBJQg;9at@&cVfn?x}&| z>!$9pqq$XM^D+~KeT?B9lL@&-IkRhrD*3ZzNPtc<#Xb6*!sr z%9Uwm^bVeeJ+8p}DOKLZffM_UFwk1#3QR$hLdEU*jYz6*#x%TlO2(%1UghWCuS} zs9mx>TOwI(SR)T;4}iaGc%X_`?Uk(BvCu3?BAfoDYlzfPU_Wl}oz=CEc(KJC9~)@9 
zplx{jdGJ0wJgfcHr>o;r=ZOMN`vGX4RFi8M;N*hN!q4)zzz(gWcqIp}-DtaCKZi8o zK!ATcK}2z>BSDUDRZE%Y@Ra=_cJ1Rgw>9c4rqf`Zr-gx#D$`NubMDc%9<@qKX@}P7YO{=&9QJAqhJCNn zM4pG|iaafyf>0^yWo?SsOQ+GvR)0zrbqI7TNu1m3H(*PxzPBbneXx9j`vzryn8jk2 z3?2n9jJm4^;ap#O<$^I@@%2o_ZSox&A|a`Ev+{8BjB{(uLI%bX2`3zWa$aF14g1_1 zIa2YqOd`rZ+T%k~o2BWk>6Id4=PPoXHYAx;q5DZ1ik`tk5ebC+?7f2#x+heblM2Dx z%KB^hGn<^|Lp_!Wz>NB>`U7mSgRdD)SjjcoJcZEQPYL z#8|UFiu@;z_R(j7I8Qko^?sP=$n}*W>#DMz!e81sg;%!?xK%qA{lFU|JwO=+CrJ`k z1zWzhs^Lxdni4xn#MqC?D-`54#f>p5#vw$(;S8{Qb{tl62tI=F5*7-hL)Sqmmzho zYAKEirn0C14_$8=6=n4HZA*hRNT(nu4bm`liGXx>cSyq!(hZ{0-ObQ3bW1lwr*wmK zy_fg@$$LHT7Z!^Tvw&;wYwvyT^LHE!zhZ3BVg7(B=y+Mc8PxuZZH0fl`NrEVjG4EY-)$HzT6tWz9Fs}Nuf=H=>PS071XP@ zi4k@*($3YwWhgxnX&(V$`v@5ZHfuf%3I4ja8%G$xG#Hq5;VObAISt3)3>d+S3YgBKys|MT}I|QaeE~r1RH)SAz zk{^|j{WwmSUR>lx*b^AEs3ESNC$i0SqoWq;oeYw7aVWkOrptWzyw+NpC(SL@^`s`7 zylb&@!w^@WId!F;b%gK|Se>$vrWmH7qI5Jl+a(c8{YBqMG1Yt79zk2#GVtkzmLs9l zm-v2bJ~r;s)k^K47$M=M^yVnD)ou~jK=uqD%d)IS*+7^;6Js-=yB z@l}g;7QNhbY&8blJ-x_U%Tcw*cS@i0fNf3j=rlfpgC{tzI|2Hc;}Rb1Pt7%S@yu4b z*^4W2TL6ZwKZRmIF`b4FW(7MN!JHGXFl4v?NZwG`4lF$-lJ{2@%2B^}K#lELCkr4+s#*Y=qx^8?3sfpF3z zZTGWZPHt?#UKYYL*U?X3vV>_*ugT2ZD0sa#d|JC~}rL*&lx*pWn$jYyWuP_JQGt-gmR_vzyJ8p<%E5R#tQU zwzYs(k`!T=SPTl`&wh{AKfIN7OB|MqB=YR9hX3pI=rx&Om-ybx^e8Kkaa5`;Tng=KD}pFk}GN$)Uw(}K!UcYC1a`U$F-0=!n^xo&Jgs!lQ7KtNi z1pV&}iA$YFs4P`X31R9R$5y*81on>CsSbAi<#&Neaz$M|eXNCDO9+q3ik?!@7)~Qr zrZ|mC^oxkGmxXC2e)&C+D?q@&9N*~0k1`G9s!FNouhLlvP;s-GqfS9O9)r zWo!g_A@aW*4WIfLyw~VzAgRkcO-Qzt<{Zr@7UlYQOtDK!i-&9rK#L92FcK=-+JUyA z+1<`%d?yjk-9Cjs!A`+csGYCiw!UKXfSz?~#r*Ev3WdPFYb3Y(FhE)45^&h}C*}dR z{&vk)k-ygT-9Qk?>G;aC=${{B6(fN0n8n%^m(LO3FVi8iw$QMwi)Z9g3jDUzlv59f zw`ysL%G;AvSFkk2?+NA5G{ou~IF|wa!T0JViEVE@vF-?1qVTY1@rG*2 z$8Dzjomk#5=}h1$oUb5>_;d}Ajf(pwhJh*ghOw?!srG9pz2aQvf`*^bZvEjO){=C6 zarm77gaZK%8LvEwIu)~_U$V_CyPAerE{ANPXX_sg^ektIFnz{~*&+o!o{FF#yIFnf zA9T(6lJ#;2*p#+|GZI5XtgGYXAWy9`?JL-t3RLpzw#LbG0m~{CD$}QbCqZ6nI3NCU z)v{F31T^HGsv)zn;LKj)X{(6R_zTD@w6(AmHF|^S)|}~iDhhjTP32&w(t>9{>CZDl 
z$CL@7!M6=M+~3q#qZ$VC?BQHzTwTNOFxV!9yl-G^icx+qswd^T5|9BARLu_vp+s=M z%vS@HlDfWDY`g^sP2{)78Jv47zwyN-2 z_e%v1+Tcxdj7=;*g8uW4n`^Slm6soMa*Qi$-s&E>Qqp|w*a^QIe0Tlt`$m)A2eZyx zI}T@{O_a7XHnDp~GJqNqxcG4jAM6N7B8|HsauYSZ5 znYQZfjHao2p>Izwtsh_CsmCJ#`;|+n!)?NWn5Sp;RgD0Qz*+R;tSNn$W9y9!OM^f| z?+p@$9O#E0rSay=q~CkhKV3C5Cr$km!%jMu+HEB-iv@$>Jm-=flyM@?oFg5;RzGoG zjso)_@IBV5IHEMx192c%NaffiMa$md8v{3>eQUEUQb6Iu4T@e;55yn|hYNsK%|IdR z_o85#rCad`Uf;E2y9m*C#&4I_Aa^Ijy=RU0Wo@QQRU4*%;zTqDN$H%j%c!8YcePeC zgx$+r>ugu*Wx63tu)ox!569AjECM7!ai!~U$jKqT3Qz- zp&}d4-@xoA5wPxV`|-TlI1wzlwo5=o6p2=%v>mGT@e z2Q8T&B@>wJpr2}r@L-`edN*n%e zO6G`jYfhLC<-!*En+>YG{uh;E)dyD$ddU{H&f}$SM2(epz)FdJe66{?DR90XSqOGV zEob{z30ZbRq<#&u!k>2f79OJUa3IICNw6^LI$UmegN4n3X&w22?TxEq;06*hn{NQd z`2h8!QO6gyy82$OqcEbqMGv)E?RE2s6Z<+NWqDpvPu98@Lo$E|V}c5#zMsDlg3f+K zRI};}?p>p_b1xQ4Jf!fKUjKTb|sx+cPVhEkt%y(7+f1deUEr5~wXe zQr}+NYFkTz_|b~>?Q=#%kCpx ziW8MGw?j!5AN3VZ>Uz1z*;*l^+`)XI;kySGIiTU5W~}8Q3=93b%$W0SN}>^tfkv)s zkGWfoT11U3z@Php!#?t8p-puqTN2$e4)mBkQ>m^xReEcZqa#qS2=GI;-_rGN1Bu+V zzrY$%46G4376fclWXAUU7YPDbimk7qpoh%@#S zbE#c&WU{Q_forHB5VoNH^j7-&`K|cJaLY3@;6}Z_44#d>e+h2syo_z+E<_8W_Hw3j zCYDus?@~)zOs&GXy%93TB7%yFYJqWir4sq==*w^d6HoSij)Y7`ntqZpAx0*hNNm>8 z`DS#IjpszliD4y4a5Y}_xqS2W%Ep~$y)#nV!5Yg_xk4IbY^$J;rHYqWU~5!c&@iJb zs$D;vbE(m8#lfOa(LSLn3W%d~~sw2HW;D-UxsVr9Ln-UUX{-k4CI9EQj3SrE$)SU16vov|2BKboTU zVoN-9uM;|G08q3x@0S2AW278=qdkoScST8Ug4xSm9WB0N5*Vgd4=EG2I&T@Ix7Ufe zU1a#!egs6DK7i&Zbop2pWqj=l3>=9ztfTE<0Z0Gq;0L>CwworUIHC4@MM^`)AKwqZ zZuX*)i1@WS%LqfT^L8k>^8rkNGJpO^Y1v-eO`TfaV0mIl6d{wC0=6zS5u{T=oxuZb zWUhQE+aM8ZSU={lEKIu;Q3I@z^siRxfB~QK5sKpRB}1Taso?2DqkWvO*YWhJ>H|H@ z%M+m~0^1?t9HjO5;CvrF8dC77D#CVOHO#myzU(mD z|6InD&;iPuEqqMCjj_*JraB8^kTjSlCLsEnE9b*($H{gXjzC!cmUUvPpv>&}-oNe@ z;wjS>-5Wc~$>J;s4~ZQ`ZUQR+-K^AJIP|!y@o*$;b;^rk?tK^C6u}z#)%@&3&l8ow zSa?eKNih!m+gnp7+-MxT)7%#OCDt-V{-2f!R&j}w3p2c=9wzVt~U8vxK zVj+K>PI8kJ$T{-GT0(eBakbfh)UgFQ*@@HMe#YOwcUr1FKasj?Ke22wOx;x}zKR|n zcLtNyz9eshq;20>UitRgpM?YO7QlEr1`V#UTXcOlF8v_%?pI9;Pbg4bMAe0FCnw=x 
zDS&?EZ7GYm_z?9>^mMU3yzh`RU8o4d?ta5qvlxQNSbDtDXjmwpRd8R$>;6Q?*{#~W z@}Ika+)qXw2k^!tt#8uI8~4$u#C$WX-^-`cqwbw>J$_DSJ!UbWl$MDlul+$fj$kWA zJ~BTnVPs^O$YDAncfv87@PU;j&pXAhXxH|m^Iu5{!Pn;dCkzLV)RYv^ifnM<$`;$y z{q4iXaBJ@-7)O}T@x5ZNCr_d3O05qGG&q$!iW<6#90&)RPf5zp@=eghX4-s`psb&fAHP6-X(J12a&j)h5NB zL(pvNbTUu?jXMovS%`p%9z^X4?lv-!#g2bZChx{IudWO^Uz)hf5e-6}mN-E(M(few ze72tA=BA_&lk+ES2S-!Bq;}??-idkWUR;?Jz>{h}cr$W;cxQ)E=oj%86L?7D7mE0P zQ<>H{)09me2|#@OA<7_F>R$$;z^zQ+6ea21j(Tk}bFyoG^gGTy{`O3`?YQp%=6J=-MM? zbb$F{m)Pge3^O1;%f5B$OqyzQc@MnM!4=e#C92O`BX{^B>-+FMcbxj>z!)oNQ(3QA z9}e8l-ECk4N#GQXk*tkp62b?1k#JGEULRk3`dnY_m`}Hl37^{j;mu#DXdmx9%RN?< zmq@*xFI0kjVy?X)<_d-5VRtSNLGQP0YN`W+cChirZM3tDXXSTY_HbJ8*_M3~{ z3w7^rh~c(vs}EYj&)G$>1IZ6INe1E7P3}(-LX*WGS}ng)bzRv|c@Ai}?^9j4MUDHLT zT1JUr9~vbmbLHrvSY$aBPI~f$sHh2;G9CjI#Q(z4&%OtPaph}b_x;4+OpOAd>W-*-Yb-CfXMM;rRY zSPtD|rp@e4796$gdHEO%f1Q;(NUXCtqq_P?S(yVOKdgMYwhDj@%_bXF#fewoNAcgM1OSawMq#9ah-j{!hOx0ZXsRA^qnoP zzQm~uUPno{;gQYjwhP>POUoAhfjE|cG1>4mbW5Ub1%%mfI&W?sqb1WHU9=LPcf!_+ z^ZtpX;bF7t%7$0-;Zs2SMdY~skY)x#Mc6u0H5{d><(I6O$f{Or)uTJl%h_uKlLnG~ zZl#qfA=U#T`R4PtqidR{wc*x#^prURpY%wP4SKNYZkK2CGauW{nlV&w7?+D+WoEx#1Szsp;V|irlZ!LC+W&TsT=oS58Zd=PaPNhr~>#1jyU0INwzW@X9?lEw8uhY z(lW~aeoSYL{^zHnAOKG~5a01?p*ITU*+Q$2ZJd%YSkp&^R`Ss;QgJf3uXzBfajSzp zhQS<4WxuH;IHnNEg+fv#vrQrF-Yz;{?}SW_+WLrjSNv_2Va!myz3vERmDrQuY|`WW ztDc-c_+J*u`JF#y2-N?r98`lT67=(@+ims<1Ltswm|k~*1Gj+n`8-QUuNGL#LDFL~ zb^4h2^POcLXmy7n!{N{G$&X*B*eu|7|CyzMU!yf zKzR^rVyAs8a4FU(J>MGQuN6>!Y>m^gUq5!b7Y8wXTIr<&Q(k+~{uouxn`ri;H!_pK zPb)lA?;2?LuiM#(zg`KizOaA^Z;~tEv1!egE?W?ow+@}rt%iSzO6-l+u%T1o>=;eA zG<407@n8=({Swsqx6%7;=!VpE@U$d{V$M&MskfBcVW~5?R;H0bU#hof;-axu|;PP;0;o`al!R&9`>UiW( zV^U)cPj2zk-A9LR$o;b;@K<_L{ylzaZZGP9Iezz5)w;a99OxAdD&}=tB!_{dV^fMj z`655H5B$u_jh>UKuqG&+CQVMS!@7t@YCDfi3;P8zhC7(zOj`7Tdnk*s2$q5NQJ4>~ zj}*}{K6^DEt}KE!E??y~l=cbecrnNmX-E8RYf!@X0cW#< z{|mnA+r>U+gTF1pkjovmaQC^9IK)b_01eI4(_19)#BNVHF8$jt<$Cqfp%`QZEt{j7 z;+jKmsyq50N}aR`l1Y&r^VTd_swA;m%`iy>RCw;sh`aa9JmGqX)-q3h%-T9dJGehA+d?tl3aw9nqPs7d-(?I5hZ$AVbZpgz 
z8ED$~+WfDN-lGHzH(ZRk8IO{3PdbE1A^a0gqjdDMED<`~D5VqBsEe4b!k@<>K4&dv z&Oj2?2&2ENy%&>&M+IdO*`+mj!^dCXR#%^Lex}&3#qBrUj1{<%34%W#t$leFbrzj+ zdBsn8yTJSLx@b^iQOUBiGg_lbeAakbu_Ka1nUJl<5bJI=(${pcj3LizBhZ`hn1F4_ z=v7=PP{8@2C`@JZ<1WMRZWQ}|z6*}c42MVL;X<$t`#__@hOXlwEe2Mrkb=v(%chrW ziWIUWHA)_p!c%OhYxus2XLX-*0?OL;+Wx8s&T!AAcJIdAATVOEN0>z!bPc}xK<7Z- zL7IgF<3Az7_eVSK6Ir~fh0Tu0bkBLJ=2C1?2C;g5*?|4RADW3aX;yn$4*}KlaBp1QBY2h&S zpR*?z-n%C&*`U&^6K;%vm?=d@)faZZGXuV z3VeUkHHL#U7i)vu^*rkG2Pkiu-#cEqKFYZ!1%C=}($CRqI&SXM$8_Vk4d@hxXNpA_ zN2!ZfVgU|Xqk@dFPql3AvqAFf%Ns47<3IgXjl&ZUBp~I;w_gvj$QYBHDua}U+4q5Y zlM3RCi54Ac?EQW5$rXI6f*l1INTHZB#mY}m3jY|=S+jw?i~|pr=*JYbyAm&tK~%M7 z08nFf^N6CJ9>lTr{*aG3;JVteL#nN6+5c4ke#bPB2B7J4##Lq!-~B7XW6^u5O_c*6 zaj*V9Pvu`My3z*VE4slXHi*IQTs<9QNdK0+#ds!DmhW>&<%Y+aUxkU?AJ`@DV#o24 z$EOFZjK9Wh_n+TePv$PfxQBx%@xt-paiY$qI4zdP7diK@#!hwECa&SL{5Hkvg;RVJ z4t)TPuImwtr)U=w9965+dI-5iR-@&sjf2@Q$A5>>ult5kgFm)@Ani#Sp1bijSUQ_y z%p20HFfdwI{;Sb=<5r$2Ms>y0()S3@cgu0(oc>E$T~-b%RsO8S5@ zUVw69%PJ8TH_6sj9G6zFGH%+kk>2Hk6B_^FoFmk*KCV=Bsb96XyiqZ#)S(=Qw?oC` zM_OBHU{qR?d@Y6jdhg&*=;8ZKXv*Yy6-BvCNz%f;nCC#sw<6E zaGXcI;xiqKli2T*ieNS^$O!~lh!JwBHS!Vk>T8B5M%Sm-9cZunL;b=hG)MO?*wHBv zConuW32`OUDFGpzS7%v1w7M8J{#!FDqCd@VKHmdZtNV5_pB^TjJapk}biKOD%iLtt zw{h#m-ye&d$sNidxqNy2igDwq!)06C=l|>csxi-XVGO);OfaltF#hQqt7pY*n{7GX z;9Ik1Aw%{+;E>OF_(<~wy!z$!A1 zF2*kAO_A8J&sx>FQuRqXHSBFOe)nFf&k2+^k3k2+-XAh; zaHgqyjSU~CUVg^qOd*IhyYBy>*4nSHi(#}#t=<_unKB_wl;HQP6)F06tLG$y2Kdvg ze>C~#uW2`ig68iX%E)qXJDiSCJ)iD}$I(I~vHRKa=nVcG2RtkInSUlodI5^>L-f`k z$G5~`&{#%x5x=Xjlq3lse(CaT5AXI9(%s{QhB!1oU!KsU_x>h7WKlNuw~4ZWrUTim1-v_qXVAH>LJ$vr)-qJQ*3%#7Eme6IX;&M=uVdr~gzy;U~-=DTW zLCw@x12;&6`*s(FSh-7AEFusJSJwNg%rB*Z5)%UC=qqSNup|G_m?t;$%*;ok?Oj^D z&M43B8w29;GH2z`l{5JJTvF}X)!5@D-_fw8!VxdA_G>T0Z9W*RL|x%TQwaZPz}Tdd ze>xNzZR#ys&)^x&GqTeXcvD#IxF|fHBT7jr( znX4~i`5a#*RW7a9Vev=d4<_5tCro&_6UBaAnBNz69C<(y?}jU@mr1cfi5*)oOvZ-l zNyd5nY9NdXIaB$DqlmkyHnFqzO*g?vx?`RCGVFWN!p<`9jJXR{Nx?;*?<)(UX-3A= z=FAm?bDXE|^e%ecORTmk;cDdPLPYcRTCkz72RFEW2PNeg6_Fx9J2yNGku! 
z`0%tU>^POiLvc_$c{FZqhWO)y#{g5jIj*OpztlV#=T1B!RXB6K1Wu?lG98{93Tf#r{MJW7qgNCNVk48 zA(eLN5zhMs3p~GlU4(c0@j>Yj+_Nz2mUSZJJ-FO_dDS3`Kj76DN6cxFz8%kPx2LI6 ztRx2W*>pB9`T{9sr7d$^Q5rb3cBka62_psJXFVB?5S=$&C`BpmUU@f^9BMZ}EMtA! zmRhwfvjlcm60e|T^R2_I@hbUgYfdRoVm4-jy` z@Z*+g2VfCIK4RE&N+`XOJw#@n?dx*p)Dpad2oKnUqt*^TjHKVy?^+H?aG>Ji41Qg@ z#B`&);>!mGoYh2BYP$#YY8r&gxv5~bBVd;(VeTSl>Jy*A1++StUMxZ$Fp*5H_&8w%7-R}~_#1-txRv!|PJtE? zhdkn2xYc;iX6YsDq#*v^hqHz3cubPF@+rA5J?!m7lzg6_)=(x_&nVdRzBC!UsE#O> z!C|w%cH)AXO4a)9fEJ5@0edr7yE~?TOi!GyWyQ7x16{1NSwaZq&uXYfgnIrEoSd)g z!w0OMvxZHAf|?t*e75cawZ4ZxS!{ZwLM6eGtR+@uR`}MlqCAZjgLY`F&qLxK)_N_C z`LcP)T*G)O57Lbiy`xdC-;h*0oEAAVTlP$%Tz&%Ngu}3pKP~yDfgYiO8_qhpIu4Ap zpXx%xF~14NzM?F?Pd}tq%6x7Jd$9YPWLdm4;Y)Gj1FVR;b>P}Bqgt2$#BV{W1ry^= zsG&_FZ`&?SGijyOxQ2_fQW`HjO6p{{YGnN~c$Vzz$K5cDy)D z8Wv9o;Tje~8qJ3j)mJc74m%%YXTq~=mdC%B1+(Sh^hOsWLux0t{LUFlrF7*bLe9m5 zJuMspR3(I|MFf5BW_g`A9=)_X-dY6nzk71HMRnth!L^Y~U}SJR?0+=beNPsr0MJYl zrJ@LjfrMZwoA1j$8P}pT0F(GL=Iu6{G#gF1FQF~EDwp{9HoWm4mA}mvXv|ViZ_EXH zmlR-0D6O{`6g+w{4Q@I>q@4+1wI%?4bB7=tU94mC_0DGjh^!5DXojGN*BSBku~XCl zg@;5fprT15ISkg-RfG&k{FGoDYS*1ipDvsFg<@o^; zNmL`P#D+M|QXD*b?=c5>RM77jWL;N@&52-J{x8QGv?~T0hJiJo0IgJkbWG_z@i9p@ z*1G5Vx|r?Nuaaq?AwR>rIh!=4(t5goj)xP_6w3JGDir#mu>r7H;n3FvL&FNZ1w698 zU$9G(5&^Z7u74+~ecq?|ZlI@S>)XT9F!uH15>w!b9sq|?I~U>^?vEj(le&0w5T%MW zOJ6!@->6D)5z z3>H9Vky0l6-RxbPzRg^DPH=F~M}V`NA@!3!288&vPJ*^j@TWoLR;fkLEoF`UeoGNKb8eEVT6!F5?! zFGvX5u_2#Hopc_8*fFt%kJ&@C&>eCSmOuc49Ia}0nFT7ZoS9I@hNv3wZ6g8$+IzhZ zF3SmHmt4OxkfT>Yr|K4aTk5tBCMTGw$fOCGT=>PT!IymGQwa2FN=>-cSi;BAZ%LUO zQjLK4I)Bop5{@-TOW$pvmUSgYQCVfsk|4K0I4J;%%m?4G;|*q+T0e(vnQlTFIf3Rr zmZk8yeV^$NkI@h?$kVX%{hOY<{wtCbZqm=%pQ4POaEQ;*`luztn527Zxl;+hfcsr| z9W(b}nl zd1Fx!UF_ZwDfmBpX&T5XhdxX4-Fg+)*MWL3qI`>oGijP&zw)9&C1gDY;fwRt~C zL4~@)A^1}Tj}mo-L~vuXm|7)oEw4fpUBou zzUf98Z*fpZ0>$*b-XA!CO)Sqj@JS0I*l872lK2=AbHf;l?>0M}jOCL~A3tk(owhX! 
z4NayjJd0J?-W*+4x;=?e0EQ5ux-*-lzSJxA_Km&6$26b`Nx$XlFR^Ej9ofsYWJy7A z$!e+n{X>0!exu?!`k(bK!`ISNZk*+8s)cobU(S5ljHg-JTqrJ1mrf;8_@^(EXvoL& zv);=AjD!N?9^|>Z;7_{1D#%1u*HgV^0$|?ZSbf%;W!$R?ol9@-_UR{K)(=gstY9^gWm(eG>` zC;WhFWbAAt0H#ivHm@S)Wn@etTp)+?g5-a5vI0biD=7m3xD|iC-;*~b+bs@vdaiml zR9tOusy<9+6$f)&-3+lhhKzUrBA7HgHepPK%5OIfs!@%4a! z$4O2I`7iAX8-lB0!*a_WuaPh^6pw-)`zKvg+cQeX1D?v_ zlJ*ZHE<=}0Jd|03WU4b;S1`dtXs*LN%r70s#rw*8oYo<1L1gN(!~Ix5=r)j~k8l?* znv~nds6Oisoe&N5Mxq|_E2&6TzD%c}#=?4$ei0l;*(Ncn1AcK=g+&2B_j$0Ny9==r zaGm#fyGt>IwI1iNbZrI$tF z(Sl!@@Vp(PJn9UP5V(E0q!1iNXY;_1{?WVvT-`R*6htwJjb64xc+dV1uHQRqK+lYR zZlNyZ zq^HMU3kZ`eyn+YtLq|&iRsN zs^Rz>;h-LwM0Pk&(Czd*Be)WYz<9HE^YNszcq` z>oS#XcL>*b16T(V^(%~XE2G7IFfUG+Sy5zG+Cxt2u_s5OyxdE1eap%=6h??v@w|&&G)Y7;SQW(D1L5B1I-?KKJ`Z$1F(3*sDtV0v*Ps z+Z-zuP;nX!V4f*duzm*&i$5Dz4+>3EjwZ9#{L)Gq(bGq&)9O*FK;90$3f+ zogj3+)WstBOKn`oJ^;b0PokL;O$WDFdWgYvnjk%iEPyNL#9M1u8WMdWaz=G_{$7Iw zlXRKkIMYk#quD12l1TcCajD#u;?OWQ&)ziKOj~xEElVteGSNfZtvJ|?yvr@aeKed& zmKqMXShENk$(ATa$b2s`XFqR;L^SX=JIyJ`n;Qvk96xX)tT&IbL8VFG9KV~RCiXwQ zroh*Yf6PZ3;0@ZRnQuW{way5$n49+gPzH<Pmv^d5Y3Lf}b~gH8?lf%7stdfy6LvTPJ@{5z?oS>&!jCk`?<4_S{_ zWPr0<{7a%^))jg^4t7rpI)gtS3>U-kIZD)sXl8Re_<)oB`h#N*kG+WuoTR|0lv@|) z5l+xBs?D_z{Ak{Ac8LUFZYogBTE%)U$@hJDDM@x6uBQ&Y$n-za9(DmBMn4rnCv=Mh zAHEwJC=c8Xm2uE41CQjmWzW-jhqznl1wd2HdUfO10_p+EJ3!_51ufXohOKsqQGF^f z?qA<^xIIjaLJ;~|q!4jB_4B6&T?+!A`vuD-#_>_&0Be9rCxJKPr&K)<-FALoWpe`DO9tK+_mj-^I@}~$U&`EE#IJcy^p3-!zVT*l_-nILW_vo;e*UbM()B|ZS zYRZ34c5|>fH!!Q`NdK+G2+hG@OjFXk2=9%k*uuiz3Qhsx)N?h70)>&fCYKDs5Sr`e2RA82=S+=1^ff;O6~EYcR2sGFO~lzpj9=dfEG5R z*#Nuh<{hBn*ag`18f_W_o9YC-z9{X$)U*LCBQiJ^u5r020uE2>w5?*HY4G8a6>o>rJTlV*7dODBqy~yvq21J5#^)~80bXQs( zav3hJc_BGzRcst@a!D7tD@&T#U72`x4j)Y)B(R0Dk+?@rmz$@7j(%z~z#f&8jQpui zQufu&z|lfC*((}l_${&j6{LIj0SGd73+u~vuW~L5c7vncpU|>(w+slz?K&b$f*Igf z^On*$Fh(h&vm2zn(nwm0k!78{*;DNIQmsa!HLMdy>qG+{Ev{3)MBNd$v4mWla9Ehc zrr;tzMS`nqO1??^xnG@>?J?*^8gTW&M#ma*1wR!EG^e<&6oUvSKW>SL?5& zFyaIctslT6RFDUXCV<+nr471M z6SQ4ji}u?0C=@$tIRwU2-%14%rGE!7fzf}xkFV9W;)O}E>$3{h6(Rx>O 
z9bFGe9G!!dfI$SbG9pTJtR@J$F-j}7IQQKQgFm-dD~a62^*ty_jJD6Y?H` zq>~SWc9}j?%rikYz~Xa zy%?-VYg!!>hom^&X4Ux3go`63|Igt6|H#vd2L6+9^-li+dA%k;KTJ<=XEX_E0;j(d zaTAc1ujZEz){jj}<~bqa_xecxGHF8z_|0aio20<`<7p}b(Qk;|w;>y`mgAWT*AxQU zbcBp*N~ydZas$fZyrQV2?BOCf0pc9f1US>!B0eI;`nVsSqSEHQi7G&bFqWmg2Tj`n zurmDaR8=*Jn{g{l`tNkL;MS7c&Ux)H%tKRzRa#H)8;1;ipp4h(w&uFCVN0^IFrki} zMb2N-vU6y=M&>0XEy6ulVv5qN8NvTWV!2)-44YpwtS(&$%oFu;zBPc9XJQ~1b$urT zQ$ai*K%pH!Q9XaU_nTO|py{zHct-ZFqIv-6-{pOfFpN1b9j3~$`4$I(#yj1ybcUbx z3z|e0KaFbfP<^#8{#5H*<7NTg0X+j*-#&6l@w2e(Q0&FP2xV);dSc~KR2vSOGy}`P zGOvVwg8#MK_|G@M8ut55GFkkmZ>EgOxxrtmm17Ku4DF_OlA*n!#n^2YK%bG#tT;Gp z-ystVpr2Vua_0vWkzPk=9M!_#1)X|Z9JsU<>e_Ss1R??3jNWgvoA>>~y;F(KMco?W zB5#)*Ykld%!bSlbLNEX4{BerFWgG|g7wTGU!G#Xp2Au~UE2Xv6#&M%RGPGv~)tHM$ zil#fJrr4?69McE0WjaiN?AU5l&@vSJMNFXe?8o!%g}d_!{)jU$xNIs07LgzBTVj_gZ0#TsxFOz2f>#TsW0N7aTI zgAB3dZtH0aK26=>GYzgC+kDtIlSb-0167PYBfGisW{Wvnn}dKZ_Wz9;{LlX^A`a-5 z!x{JL3Pe)NKF@asdMCHhT=;t1^0LU`>J&3UCB39{^PfD6ro*iPpf7>HNoq-(zFLp) zB+l$d#;;?r;<&=)TP_y8rfl^L--m16OAF+HrH7lBdFD{A3e}c|gy*e*tl!tig5~C% zL6i7^cH`}n@a-o+10N%w`UZb@%nsYl=G5V4TqI3=l4{xsysM^TA#32i4 zf|-}vMn|kNDbZ%&5miik+9_E5U+VjrbyUkA?$eAfV21v~Vsv&EW8{bW=5()J=z%kr z)5hsV^y|Wx^G-!TJN(SJlh-)ofSv0|R0FTx>{_BCFvvQqKcnzH_&e{>(kZ<87L?-a zByc#(ZWMcSymTW0f&POEBKzJqA3j^qwGKsZkn>CR^+ow71t6?!of&s5)|-sKWGjl` z^Ex&F?>O1h)l=(!#YSJCp+3bT^3SjTYo30hj{k?+WdGuNk3q7%7cWtx<0b~cI30-j zB^{I087to~lbS371UR$kgKo4g=Ijdw#XLZrGSNdR8`z;;%JAjpa&|Nsuqx*^ zsrT*%b-Y1M19t*a6`2V%LmVfjjzXhcX{>bPTbxLIo=hCT3NOjms1Vd^8PSe_Y{&yX zmh!39gJp%2Hk9+0p+ifll5sh!pEAKG4NxQ#ewC=dk3BuF?csB0 zKD)(v106gnWjhoCV+%W@x@HR(+mw_B>SbzZP@pWjMlbiGS9s}Hy~PATqD>hpkOMoP zUIn>ElM2xvfnPCBUYEQ#yZ^i9>Z>&7GTuh*v8t`<)Ye67x7nL@#c@~0-nx6SJ6yB` zPb})>uyQXfTd;}YpllEu<^Qs>cJhM-tO3}`4W^pqJoxjj{SWpnx6cPC<7>?2fU4%o zI~N^sJKBD;X3)mmOwTi!|ByE;2Y^?x+^8*Jo$^(iP> zW_uWj#d+F=51&!AHW`}Mwsyiel8G0)NoGLWJ z1XzOLON%a-YW4EToOSLU+mFjEM)lA8lWk>$X39nUE<5{ix=#QUNO}SR&*h#xt zRdg%3sVv)pO8jIdSnKqVobQHX7(k_7Kkn%2Uoq+l?%Z=&v)fPaaNM1=oF12zd(|x@ 
z3A9mD7<$j@@M`F1aTGcv4y~Rn=!2nm2c^0NpUu4#^w+5VK71;odVa$8j?7|BpR2Xx zZAPN)-jtUA--WCH!i{|5Li|CWN*l8ourv5m_oKy7uFJpd`WqK4ma^s{Ie5aYjKp_! z8IVG0kE$M-*bwHzly2(Mr`u_Wvzl9@%b||N)LmMlG9~NTV^d-xjd^5v6oA4ixi*7d z%_TP&jpK+fR^=Fe29AcuWfG~5i}}^w=chZQw-h8|dZg!1?Ay(Rhx&vnaIw8@;H&Rp zRDKh*^qCPU+n-3UO@`idxhR$dzik1mjA{|l5ZM;X%#_eBDw^*?3d*K#YPxW->m(V8r*WMH+^S)QxmMVAG zORd*kqU>u}WHeH1l(0IKqhdby0zOC)1{w|61IqouR_KCol$e2B@4se+A6arh$331HC^TF@u zbDm;^95emVBn1GDj#4gx*-POtu3CRlh#Zdp~YG&=nlS zCVW3W+n}wH=o9Ga`^9Fs`n}aJkl(>`id=~{G*iUi!0Sz+CM~Xo3qeBQAd?cQo(Fga zHP0p&TD3}Q8K|7?eY+GZKtl`?ww_vMzPsF21vy+dMjbXtgkdp3MpS`7o<-KfR=caw z3miYdq3`6x4fN(SZ`Ppxoe-by-Yt~ML9X~3NHITT}G z9b+UJTp$#a;*n-T0ZrSymoNzLN#eHEkOe5Be$vJ*tEUHQY-NH0PhJ*Nf5vxN#!|hV z|0me(KPx&~MrXru%ikM&pIx3TfTO`!Pvfp_U)7Mo=cm;nk)4Dl?71fHy*uC&CPSL6 z-7ubZ;R-(>0`?xDzXLE&yNn&f9^93d_F;Gu=s~Xx#y%%9Gab%7(S`T@Vlb6{$|aT7 zczYU1y#(knXCCySfyfE$eO%C1@bRmfPhaGeyKj_N*22&po z)sErtjz6Z~z14aI?yy`_lGK*#hK*y7_D62KnKeIXO1DK5HBm~{%KyI!M5t-N&Zug# z?`S?(3Vk~_`LhSo{nsO{S#<};VE+46icD+Mff$&%e)A7W>S8)I0MePHR-wm`qmgy> zzqAhjC&=ehFCtzg4Zh+RQfN@U-GYkSPr1V2%FB<>MQ8w$rOE;9if694btGn^JaI)@Oc3Kj3=b}Mc;7)@jl*pdI+5CDzQ z8qImqZiC4j5|VV;QSUbkj_0dYcU`o-3V_egvzDlK91xdCD znFF;imP&-xpH-I?z;;x*x8%1@u`-<21zVxg7#3N$G2|d}lm0vSN{VL#%`j6%il?7y z*4zKvV||B%3viKeH!fG3{hsc1bx}X$F(tpB^>VpJ zuo%=;seRDYiQ!6K6`=p$V6*>mE(M50|DZ(TbinBpYfP`*=ME7-@$d{$`7VulD`^*>--YROfj&uKi?7dZ3 zT-nwI8VK$ZJa}+-hd==d?rx#5-~ocWySoQ>2pWPr!5xCTyF1*S)9Lef_c`b7zTEG- zPf0-)wfEXf=A2{9F{mSe#>T&wAO3vVMe3bvlf+g1UnlafN$pJ#0jv&!qqsA#ziONP z_5W)zz&q-52b%t^tpyl5X|!o5xZ6C8E-p$rN;(GmQ3K6$A$}y3a6Z|%@8N>TzO)g) ziHkad*=5{4Iw7cOq-Wo2um2Ki`6}Z->Y=cUAf%ySa+6$f; z0(g=A8UlMVdx2*2CHUVz``a6U7jROL^o9NuI_0TO#L6U!d z!@u9{{q;?okEZ(gW<&B{ui)Rm|9|&#f(SPwf>48gU-8>lZ$9lGjx4_} z|KIQR{zTbEWVVWwmTK@nhxNo77}neC+w~>3zkT+vFZ|em$J2qw&DHyVfBUb$2cF@| zG8A`!CU<)4go*5!_ap2MKoYpP*u`{tdVp?ryA`Q>I-|g5*|(>yR+ui%Y#b(ou~1x=%?8W%_fa=Dr<&ch8gubOKrb@)VD=@o)Yf1e ziLl#ldtOsLk${Wn4Zqt>)Nlr$9F<(gD6?+%9n zXc*R`N6pU)Sr4~`>SH4GrD6%Oosa*w7XEUtZBR{QZ9XY#H-rAf{7!F6W*($)hBNct 
zRJR&_A>2R*3a8VI=>RqL!3@g1Ng|=B&JM6>ss!M zhdfUtXqDc(gIT+|$7PQ>$CId6(~_^~L{9y`{OI{#57v_cxPZvvg^2<=)75OT&-c^F zH2Hw_dT_CFxhPO~J1O+f74a%jtwq41QA~PzbY%F&=D#Tvz_jmvm90r1xfk|BL{i=L z>VZdvS}g_e2yHb8dhfBo`QM)|nls$jFI&ELHfm0>Luov*83L|k<|WFNkXQ_wU9cRO zc^DwEWv6DA!=>iHY3rZV%~M@9&==AnW>1@CFeLXVud~f(tmPjYIB0a6oCNi|;{Ub1 z{B218ctOGs--R^NlMU7a4074JYU*v)5l;6xGJ{QdE5O|$Sql`K&BB2T8xj0OaCOns&pcTgdv1G=QULcz3W;|$An@EhX2bm6~a^wM6pNmG$ z`I<(RVLn$XCq)ewD9Eg``!n>D84i<9zC8iwz4`U`<-ctU|9jFj07lZ-D*3zJ_Q!j`G?%wnr;##GB9<3PGjvX+<<-O}5%1?D#gKaM z-SL9ZIoN@B|83Qer2w}`l<@>O`-~SU^&0JJa+u2pTr`EUYf&_&_7bPt$8`gk@J6I1z=GI>*9H7CRe|exx|L z!IiJG5AC_w+P_>ZU;mzTX}5j!yugT~2_e1qqTOb*f9e9raRLv5+^+8OEZ|aPSb8owN&clgexhJR z^rl@0CpK+Gw|BcIJXL=@6WC>fXA@?|0Q0t;i`@zRaTvA!|Gc9Q!b862_D2iO4X#|H zB}F3sXGD`LsbpBRaj{t4QG#X}GV*9tvacl*IXP-S&P8ER*e*-a+pLLJS!j}q0=D(N zIWId$M_^HjoRp_0M1X%WECu7bLtoJVmY5Ma6rtOpn@&7_GZQY4wF|{6<~zRSnLkFl zLlgOV!xF4(qc%yT!9VG>CMkfMuvw=PUG&``|Ieey?=7&+qOEYx*V?R$f2Z?oaf>5! zwl$gU;3quoJ=^LdZ=M7@@8ofsFJUye-!oFkCB$j6o9-kOq+m?QW7`j=e4ETM2uPf( zBWx6)-8ao5Co@i|3P9ddsWNkx-c2s}OEJ_@lZh3#!lqaEDpo1UcKBZ7 z1{}ziKfgvl&40@r&J>b!JX(mK5eImUMdFucajZrNiF=g;eht8pCld6{VYNdnnZotz z1^PBaWKvQ#27rbR01#V2B!N&gQHXvG0P|)61uV;(I!}sDugU+(>`YX^*(ME#J zFNYT!vl!vTxNNX&mu)e|8gZ2Rs!j6%duEw%BA$v3)tH1Tn5lP^DN!soGkVV_WU zjkK7NPn@U{Xp0gJaN}P@OLOY?2@Sj(EDk;#QIe-Mdn$bcdhSsDDGbYi!_y z`}I+C?wsjDeRSt{zm+SM%$EFM5@kX*lcB7Bk=M7nU(ZC90Y&=n1A!>p4zXzDRI(}3 zN%o)nV)7JMI&v``0NFmLk*Ty$e{~8&3gad|=WA|y)C8?u zVP#3HRTpNONkstc*SQLY|yB&!Y1ty8d4{jrx_7)s^72D~AE=g@z~psf@uEqt-dM8(#;9_Q1@8VZ`Lc-R{BmUC?_POA&#sLvS{L7XSeF4$JRL1 zKPVTG=p+TuNJ)$)1OmZ}S`Ugo!)IRcJX?6yJ-gz%Yr%tCkoe$6L_9 zi^Ma*m>A8p6?xH#5wO47c~h?U5n9p zhrsv}TPK{pgjhr#%kDGFWxPCW|BOi$GGDj?i!35jz@1X+jqIyCW(uy7D{j}dE^qf> zQvS)N+CWrd35BxnNnhSxD~HldHWf=l3CoM(OJd!=nBgc;N zX3B~ER!!g(b=E>T6@?5C=>#E&#Ic>uBsPtq28p@1-u81Z*Q)|8dQzW+zS*Ypa zVbZQgyF3^RuQDD2&utIchw2vyUERx0t9_T0`QG(u#rpn=r8bKb8*Ze$^%>=20pUl2 zj1#ekf%KC%e<(In7Q{=KfrI)b7JeL-8=#M{&L@tG66&Y*y$3@;Gd5e(JwRfi;U{X1 
zo(ZR2KRO#suD-Y@HV6k3!yip`Gh1>XVUYn$1HCbxRfzwR_l&}S4NQ^k^uGk6d;t&! zRfM!p${E?YuKY_#IHL&@#xMI^RuT|p-_gcX5Un|!%1A9;aDz-gUblC`s^9qQtAp>a z4mYCMx}NU#N^~h%kYT;`Bm0$*LAE*^U3rS2jg`l9v5}*IBtXBlJ5&lVVM_qLsYD)w zAo?&|`W(4DA7VOu7ASUCJY{YO*f*6b737oVld_nO=lU$;D02J#OyGBBO`eH==Cqun zH^?Vt;_ncAyf6SsgYeh7-On!*1HR#pmu{Iaxwjk`0oO%mJeV|@+qd6yE0Pit7-;_! z7!G9pLaFVD3*!kcBo^d*((y;?;hYdDF5AhEE|dXO^LS?~XcH_(Ocz^ zUHt=>Mzef@0QRg!vx3q6%tlQEiHrn_d-3UxPCg8z354&P9Hpr7-R{?qj_#LaRuvVG>*MOcH!GK1A!5p%d)N0=^NK z2f-5ayHT&_v=cBSpWsW71$f}IOt>8+=X-IVlPKPkPhfOFK@MK;P5B0(;2EBAj^7ti z@_DI^0r~2sj_i!e2T#MD$;xI4w{gT;(}}sC{O)&QmE{$04LhP*gX*A8F@G?&As)uT z-C1#TdEZw#%F)zONajL;%^7D>!_@r;eWn1QVF zBz2M5-HG-YyVVYV<*H5$NPRISbGip)bnX8; zFaBM@6SafPXzdeM`ov<;LxyzU6_sz;7bA{OAy%NmB07Ht|Nm-%WycN}( z*aSK&55=2!(q3q=pV+}%^JzY}f;HgF#R@6}`gLr6VE7))kFf=G+n3CE>ZE?y2-Vfw zZA>c%z4Cn@CWN-ogg5wxgrvFg)60&qYnGKb%u_^7JRhzdLByHs~ zRvFvsm5>}ie!M!~e|iKAA2tU@^>TH1w1Z5O;z(5bH$K0ElFFn7=db$O=jvx7BpgY) zmv2cVwF$6vPq}o`E(2XfIn&mypw{r4MIhne4#LB?k49T!FHd^PK9=;@`0n`JQldfr zhB#m-oZuR^*7Cj%9ah>NO4luk`wJ+Lal>M#xZhl-Sid7!8*Oc^tL_>l?BmiNZfP&} zoWs{BB2C%zr>_lGm#RmuP68dFm{%Kpm^lri!6F`vZPkOAu4iwYFygwhkk}${m@=qj zKH>efrunPxn9U4vC2x1W(SyyjDVHgj4=YTTklNk(=Kc}Z8!UP9fq5iV0gT@?mYc+B zAh_6wqyDU1xg;Vj>*bC%#Ch$mUyGN4{L7Uj)E5R&I=E_#$+~z%qe(xCMTXY8 z-O}z`84!?-`#^{80Mupi6ed-o5IkLXQ#551ppaEOUejU#lT>zy}DGZlow>*jT=q{YVD&flg3dmpKM=D*x4!qfk8?(F0U`?Pz z@;bnI?ou6RO1=jtTN8nFIPX3B0SkD>%1{zW9|FhH2NGYtTql4BL$9?(k#=6ZNt`KB zEdp;4UJX>#K@P0s?sF*wM(BL} z$<#XbMHpnb;%x5WZY-rEUAa|lrB`XTtWu5q--?z+r}jjCY4E@Y&;i#ah zN2P=Nljt> z2{vip$2AM)#9vpEf?yOAQ3WyBKufH~sPWYvA+u!m;?xFQ{5)h%B~K+3{D+#~007+| z*H_}@lGS;65Z5&da9%3G>?}7r5;JJlbnSQo=*N#(((RT8E1M@~DLN;I{b{AaQB92u z0}3Dg{9$3=JB5Pl#{P3i^iS*gL(0vXD$bFB&qYj~d+2!GwI+Sagb`aoDq5;NA!sAa0d*3CgONcuuAf zn)cYn$>5co95Z0hIM?{bf^-_n)G*-%;k#LW>M2`e%u)w?@$4P*#)e_1QQ=2bjB1x9 z;QZLcBC1jw=wvdK%2m7JV2ugdZ@Imrqe07l(-fcPOn-SaHL(c~ugnD0blwXg@5vFn zKO~cK3zI0owA=hx@$MKfg-f@^K%{GOE@{~vKsj8_+ZRdle}~hR-Y(1kdiC~lGf{6x 
zOh@;6%3iC{L58Ln)_s*sYWrr_mw#87ZxzU98k|_)S9!vn`V!iXcC5sFPjDqD|ssHEwE5Zq8 zs@mmvqg5YhziV+wwW>RMx!365`nAkQS5#;=awRZdd+AKUlUhF13l)A^6`}1r+$_1= z8~^5h|6}FR!JmbEV?$!ye>>$I5ACp@N9z8m(YnxR0DFAm{U(pwMv&&E8GO50mvv{D zy8d*3NT>lWpYf}yG6L?wkp`g}OQ+nS6dsw7wD2yAk|gT2T5;r!tK8uAqA&G5BGo5b zmWQZNGb8@*=PN;c^Q$9hFw3U*oR5^;b2@g@cWIXQYYpA@;pfmt4e6MbKR){g5*pu@ z%D1FtSd?~7SnWg!PnIS(f7EJm)6i)7TofaYR||>SEYusB)uc&Gg4z%OWh}XC#hm3YboPwv)eLn}PJx0BmQz#zNd?y$5+cp`*?P>5!M)IJs-c zFPTy)A1UMe7!E_H-BxiD%SAA5K!B48P>g4T*K7F*n)1Fs%1+W(rtdmwVt+*yalMvP zOfWDn;s!w6RoYr!vZN13jnG# zFKN1~86p%INwhwcax}Ks;QxIuI&p+7kj=zo+3qF?yAtNcZJH=#uLPGaH`{?))<0~( z3EsTqInxrNMpVktpX3~$Y)}xwV8}mHkJ;SnpctlttQpX2wX!Fl-I6=(5!8kEejD>f zqVTOZ+2@ruUCnCavZPa1|5q>~)N9o`Ni;Tx4VP6d+e|D?{X)NZXSI@854zHwHgDga z*9HZ+#J7|85mdZUiKLU_w_r6M+>um`JbQ!L>3ez_PZ}{ZSv0W=l$-2mM*&vaG%iYt zZje)!hSOQyK4J?sa~xLYS9U$^Z}6Cuxn5pg@{RBi^l#AVYY+f20ErvFJPO&gAlvO> zBaAVAQ=m^U?hI8JFfx)5&<^-V;nn+x6v@G&2-$I^>mnP{JIQq3GPhgJub(O}>Q5WS zC-%s113jcbyf^OyoXha;mB~uzv=~92LXylN^;;kEg;~?$73^wi{AfEuUDq2g095S`p%w* zuEzS=gQw=zz218khPtqsO?pp5S-CW7^jfECn>gG(ykwqti7mo|Pj~yrA1AgLNQqb> z=dX_rpXjqmRr~qKDIoAB2J=l^8WtNQClaUa8!eObyX89;S?Dp<=lki}57gZ%qQ7a_O>t!q(;=O>?>%~U* zLMk2|A)B*T;e>o+`krN)Q-zvSk|?`RQKQ%MPh<|8N;yExshLWvN^Nd&{`=|Nph84s zZ3%7Dj$gbIUYoBm1py5FY(7UT0N6I)0ZJ%qy>FchSkko*`2(EDbn(G&+jZe8YD_#1 zl2u9ch>G8MyeazNQ*OKBCf$bT?R2^}259kPT`y{C)>-fn3OIyWEj3Y88({Y@fb;u0 z#1Iq!Rdl0IQYM$5crk-epsYgRl~qf%EqNy!?9Ec@$ieFyMH6fH=jCIc`B@ZX1W(=D zd!Sx-Q(EFE-AWDR68MehgavObh~ zHj`ny`oL%!xeRR-=Vl3~n-g?G9+yF7vAR+XYUeY7Jy z-?Ec2n@v7{9BSPUBOgwoU>mwIiK!f-lU`}QB2FStBY5IKO6Ss)7DSqC1z;Y01a zNTNO`Vw2&FoQXB2%NgqhW|sgb(-G=~uvFtWcYO%?jYCvVW|~bONm;6Mj-QKnsc`F4 zSEXM-5)k~clth^zZ!9lcE6Giln#90xtC+=F-zbgHS+pM8w+40I>Im)Nu37vTNJwp# zdR(oqkNh(l$!7gU!df{n9!Lm3;c>Zuvz)Dp2Z%?20kmFXCLy0g!vdLK4UWkB^LG{O zaQM*vT$&22b*QUR;eXlT^kqm)B3|5G#4_qHN+(UWex8+dsWvqzu$N2c?c5by`txCX z!2_uW@O4qn0Cxb67)nM6pj8lJR;C{jU^U-t)JCbE!ZLl^W( zM_&kaiGgwO1uRuXWL!!cyCnQa+&XZR?X8 zUE6K;3nLRuUhc?W9SXgFernXMb3UB2$CILK4N~&IvG^|5jEhec<9!+uZ@x2{MgIA8 
zjbc^hDGgw`(&0k=Al)*Z$gN`#)IwV3v)>D=toYFGrrfA%-#`lfdALv;SZB2;tS0ul z4A2Wz0ARL4=Lv!?=>3oOYcX*5NYo>oqcon6AmE zX3jNU7h%sZA|}swySJ4s_WKU&_(i&9tXNnXC*Q>C!)(?mQ>%c^ws%X2v6OOo<}*Ic z&+R9^jD*t8mEJ2iNe|&elxDULvEzW&5p!%7+RjE-*@}~c+4CBMZ7aZG>ekmp z7(mbAK&~LR6U|1495+tgRu9k>7Rz<9Y7uqCd{n=IV)9Vxx3MFE`we$b0OXfUp@%o> zPg1xL=6^O*jU|^6DfYeogU$$)HPOzP`gYn$1|uM>NRX|rT~UEZIvkf@>!N}Iq!O|*5*gG{<<-ZR}o>jp7~qg zu>9h1%;O>pFq2}phV-PDuMTC3=-t~(O95Bvtnk#d5%9zQiNa)q3L=a4SKl(~?IA#J z)UgM$x2SYPO`yxUxPWuhwgQb~(pi~ru6ce=SpJtr1zMn5A-zcb4-)9w0=dl5Ow2*Z zht&?;n~_yk-lp&(#Rmn?>MzrB3{&bNUJ%pT=L1DVjPA0F3$MF7`(L7U^H`y!(?gF)Yi_ z91OqdT+QLf`?V&U^#rtTb`_ka0!l>dj8P;akHv;-hZJOy9iR#&bC2%z^mr}A357Hv zI{+g(sOMq-O5NRtQO5}4)L^5qGC!G{VKHn}#tBOUS@*qawUjSx7ONh!iv27Dp#8fY z6_#95#-`6}SoOy5(oV#B80R`ocbRAv&lqds;t{R8;3tx2cM#gbJ=!p+>X7U~a-Wk> zv}r9h-vZX3%8bfpgeGgcMrqz_W%+uZ*tGVf-slgWl=qGdK#tmRf4bfIg96eP3HJeR z^5?V6ryZ6iix6y@gD8)i6NS~snl92s4afj-`@^}Rb~a=p(8+~F60>*F&KARbnoD(I zm;=#AGd&<#pcV_+0PuEEUcn#h#rlP&nH8QSP0U(DbB+hSCv{Jcfhkqs_ghq%?)s9Q zF#YqFSVUmEEymA3DyY0LMWdrEx&c<(a?4Bc1Zc!Wj4p!3(LkUNLE+Zc=)&+jN<}sz z4inyP3wL5;_lY1Mwm5l0yb#rcSfhhKy|H?TUwv?L%9Q8qrt80W5m~`!70U?X~JZCb!Vs8%ln)a5_&5A`j@8)7kzg zviR~RF`ondY%wq;rLIjki#ev>1Rh4`^o*8ieG9*6^SH39zaCR_%GHR3zh1vyMrH3e z*z{bdOsT8%HqrCFv!|)RowleY1$kg$2K3=FOzSia{SoVo`Mp5B4Wpz=L5=-mDn4Ys z2oTmd^fY)UzbXQ4$jVPY*D}!;XOxH*v}AFYLNhemQ-2yuD4GT^Wyse(vj%|>Xm|b? 
zCg`UbK{zd1>RuVP3_MG18lG;~eCu^Y7)}6=9tY-eRQsc0G?74SK1WIv1iC?wP;8pY zHaj5^xr@M$Ae^;!U(YvCB|^3r+>clD#AF#-UrjKYG|=XykslvUd)rYiJ(nI0$m_7y zGVLD=8)-%&A7Pck_f+62TY)dyadjV)>Cy9zVl(RhOtZ6liyD$-u&4cWz3kzhsQqj{ zt_}$53c6p#sg`Nd@dpoP zcg@)oSO(Ik@b0a_h#1rB@M~=`>P#Q#Pd8~`vK&;DgmOeTZS4Upf~RD3bz%e&r)I}v zK?>Q|+|J|Q?Qq~a8SR1A*}BSI#C&lhVY=>6toUn`E^8w+rDfB3MJuw1g~4RINyhIM zOHJ^P>^C?{DJ`o*cG*sqWHCHP@zd7dW=irZ?k}?Urf8GL-E2mZX_4lrNx+02cM@tW zl`0~5y=X+0CrA^>ola75)U>hFLF&QNuB`wO6ac)hY=81)@Ns(@^gNj=d#{$@wxp7~ zdP&c>EYf$a^RV0ZJCRS51sFe%GCASWLOEZMnhYO^r#E%{o)Vz|6-2D!eCXZ4A3yMr zUP-Sj`#2re`tjjeeEe(H%bjb5P9zAJz<52UadiO}MjeDJO@!T1bVK)5vT81ZwXdW} zEv!zV?u!Klb8%8mE(c48xUe!K?6zGaWW#D~t72+FlCPM|_Np^R<}WFveAhn}HNc=e z=!Tg793}^UBfW~5;)F(i^d&LHQvURikW^o_q^`m#0C%j|WE zW|aZ1-JD2XK0_h*yV7W5F8vN@78-9>IKO(7Psq~PBJ&3to}o#y-hfcXFa;&}YgM6U zUQC!}fY(zn24!63p>k@m9-V)nis9v4kw$g#UVf$MsH>4y*Qi!W(P{>8Vw4fg*O#X> z>0>vUz>T3oqFJP(IhSUH)OL%$BGphAq$)jhaUIdv@8pEK(HBCEvpHR94-BRU)=C5D zmy{)eX=oxn*L0;1ZZNU?&%Pj1_;zAU9TFX`ppJf$mA$K-G4c*SsD5Ui{)*NIQ3G+* z0vgZooLjpB1%T2yX{F2hukM4l>~DbfhOv{}bJ9PE*j}C(ZK0Hca2KeZ($Pu&n4m}I zdvbpIQV=4GK{{Q)nJOw896zAHZ$lj+xZ`{ZHph8S2+hKezC^jIuZoI^>^g@Kw2`o6pL~-Q7tQ;tuD}>9Y@hv zg(xfq6n{}{ss(rM@bsOmH59NxqiwZc#7N#c152utlb-oy-V`q)>X)-k-@q>b*r@pCa9FExJ;0?kBd&Fe6k!O;~hs zc^U~xX1E?n^rnjOQx13uBk<?yF9NvV>#?f3m3>77HKMn#i8RwbjWZ_^=m< zF+PrOn7TN8w{j=iBjcoeo6O}ItpKyVdP!Nf@#z}e5zB17q^%fLojNv6-9`6D%7In} zaPhIrWVwjx|Hxc!_#m8Qq?5Q+2Ge+Eg!#|6ewK&cn5uT7(e5+TniE+7rJ#u0-0i*; z4mW=872(`aw;f*$(8bdS9UQL<_nC5?39e0->C$Avao`j>jB*eg{INT@5}!bytO3=2 zB9HX0)`hr;w?Bb~rdw&BWw%<3jA`%VLVec73*}BL;3z&+HynB*R zAceCk5ZLr^})Ww)ix z!e@#Scz;|wvsXe-%GkNblf|_c3Pi7RWd&F>aZ`|0Mzxf98Bq?8ZSFp^K_IqA-U^+@ z010^|qt)(0S5}ciYnmqMGP3XeBT~QfNI@vDcqV9*+~{|Y4bTb`LC8nCW?YNeUQKnQ zEd4Sb7Io+}c!Bg1AcGgou2#Mx0%o{DpfSdq_bXA@iav`rUEh#iDs7g5%t=3RSJ$7e z@y>6u6S6EmC37mc^l}fqad;c$`{`uYq}ecxZ{xE2dyRoUAqI|OE7`u_zD4GVDj)5d z|6xtCbXG%9Xu%+kw2F?yKmhW7at!IwBPTeFhT+p$v`9XCWB6A$*`34NwTy{WzC zf*1~d+7A7hnYu}S7?o@N>B2r$q9rvfnxutf{hG_Fs#Dsr{Bs8_T9$O^mQ3F;Xh4&A 
zF8rgC_vWR?&S~VuXOn$1Fr@qQB>`6$JfiX)kY2xr8v2S010z?z9Dth8>B1EP59@He z1cL1vRpjdwMTARE{0Gkd2`AF1n z&I1lSz6H8O)pH&9vdK(I#SI5QxUxash3Jc$Z?Vg2C*77$mXJw@4J@B#cfAEW7x#>E zphgQCqEn^EF^iwh%1e4>xECh`e?1RSKTMC8-1E)FD$qdkB_Uab4xQzfZcW2~90Ur9 z06eO2y>J%&J6m~0<~0k)LY`-3G!U09{IWIY=5(E)FX!E}X=!6Mcsm{<3(33wv3;cs zj3rrTH7|Qj%pV1DF~!2~@sOlhYb7^RqhFjPb5bi_p$LE=%A5w@HM*+$(+B8rtrm?l zkkOLa4BuPFoo*ro*nr8P0KLm40C2cExJ6-^o-SV`f#v{qn~Pn-NZ8Y!QkGrgV2=l0 zw>Us#D~75dnT-;YAJd@Q_96MZM<*reBqz(65Fi8v2&tP{1orlqR7__rKi_1Iy#g2L zG&zm^?2*(VRove~%y)5j*oSLc_H09I)L|`)z>UF8@&6U)F8_*iGYj?|9=%8`aLz~K zIY~yR%2i}MqMloVzSUdaGp0~O)wtiIrWC&|<-?woe*Z_i6|lMZjc|v3^p8VL_%qx; zhX2|9shG}ZeIGoRCzVJj%BAKCX-(~PyaO~th*qH(@dE|CsiJFMj*mB#wHA*=_P1XM z%BrF4X7)cB2^bHy;jydeI-zP91Qxk#eLMQ1+uWK*eD^5&u1j8E(ae-BhK$g@%|I^G z9z$2V*aN62stq4hr6^KJ?M(Gu_Ib+0#tIb@uCRDK7}lE7(*?aaYhjg^9j%<`jbY+e zD|;%2((jqzRyPs(By>lPq+fItjhE+ zyK>>plC1N?BC;34@a)3Q9TWyNc|VPgpe%WyA%m(7dIz@!YaZ^N@aN=Za}D5#AxHON zM#vZ4+LbFttQL7+tBm;e!7&e5ecUS979Gv4%iogC0MePBfN$4nAb%oC$4&PwZXO)grx8uPJj zs}1xa6^fo_SIy;!i0e|WhCiw&vpXZCbKAs(gz`Fs ztgI1mS+X4V%C|fSS4V-fUzmS#YGQUgxIpJXXVdP`vba4{LD8j6vYbd{OBUe@Xo zJXNsV3p2hRcgZVbnuYQn4KADbGU=R3TT{;i83L&-eShxFL%)vfAz5x>#@|Oal{Xap zTC9q4+eyy1d54Qx2>qYyF%)#tx>jg!?>z2vCOGjqo#I(dhIc-<(($43^_!jGcfc;0 z1Eu4ppRbh=AV&Z#ZnSC^7?hFDsF7t4LJN#Vc_zpvEt5~mhs{&jMXL(_H6wzDk|!4v z{#ye$kp`DiORZ538)RB^4OmWNr->N@LbwIzU=7qqu+r}RQw_*ZCk`(r| zu15Ptx1lUR<4`=}CfOFI`=!$TdXI&88TOxA0O19ymWe>$2y31Jc5gI!!07fO&|+kh zpDs**yTtwF`5U{u8%e{CF_u8-{biGxfNoR$#d*||ZGE1k(g2&uNU=(>;&-xD?syLC z=`Q__7Y8M}Wic1Z6X(UOJH&o%nJ|xU8I}>Qb`y($sVTkpn>-VEnEkbaXuaFy_?fwSc;n=bOP1b8?pMRSNeF4 z-|z7#=mbTeDX;pQ)=DK-B*ip+-_~|E?)uuLA-b-b0E`;;%)~o(Wqv7U{m^u88h$1! 
zIsElc%{`$wksf%QUUwc7^TLzL_Ny+(qfdnRTn>H=IPsCc*+ysra9wELE_wl%Spkz> zKyfu}t1_bLhJ+I~0Zuyfv&DKWzu3vEEpcI8{oesDnrPdsV?>74xAO6eJo`l}t$b*A zM}5NQi(|HA%JG*~JS}l9cH>q+a~DqcsAv!e1cU|%%Y@$&QFs`LrDYwD7h`RT(Q;^_ zgR)+!ULDM`HMngSCutl*=;NxIp%X|!e^;{vJAHV4dS*d&Qpj)Se0?O)K(^QDypL`4 z3Q}2|%VfRFIf+&8Ll4zYy7xUb?=k1EB z+Xh3D28AucA_#m*}(R!&#LIQ&-0WyVe2EiCk{D3`eJ+4Sm zK4CB!E}QT-&cjXhY43uk!y4+*ciD!l`#mOI&)p&)*9P(XlhhZ){3g-Mcf#(>m-E7w zFcIp@G17f1+$?yO8-Cx83fL3~{UkF$%X7{4u|C9#^dJViM z^!2v4>;FMR|H0At|NqYa*Tx5YFCi3eMI>Z5tG2jCf@ZvWz?03I0qQ|FaK7n#2&&g9;XVFr8(Ce69KS(OU*j zUBOBRC9r6vlea6i->}FzUP@dNm_loEh-v-z4t(!I5fZtpi-7CoGZ7^sGczqc-DZSH z&S#D*`SWGFUICN;JusL!fHpzvc7*Hi%A$8L&2tC0-{G~I{H-mpM)Iq^I1AsG$FMFYqGdhfdq zoqt`Ys0?Hzu~61I4x{3gSE+EdQG-L;gPUV`EZ28n*#;0L{#kH9i-3YbA`(tIX(Keh zF=L?Brp}~i^;lzXaN0$fD%TK0AU&GwbS8wCelUkg=kts^$yylYtQ~~q!g)V<}HQX)>i>?w0IiJPBpx)~c=?6M$ zojqbh58HpfVf45<;CZ|_r{*!8XtYdG3b-)MX-h`D2jOzKkS#XZr{Z&!NWMLrDG6uv({L#$MT~VT>0ua4p z3x>ys7w@SRI-r!lIj973j$|}aPn?W9+z;zWCX;;!5y`^wK?0{yff3zwKg?e6d8~XI z)#8f2EwFDVe_r1@{DeLUutjA|y6mHM7PcuuaHbnE8KyyeR9V96^-fpexZ-wCG%KOd^B&agFdb&5Z34|oOg#}hh_MrByJn7#>AgB@>=f20UsAlQ?i=y zgwK_Yk4fROd~?*-Y3t|h@#ev8##D5W7tsv8GhOS?xwpdUy{9t37x$g2iD0D}&=RmX z1(j5FddPl&Y$5ryKTW8C>^J{5ShsQ5%mmZ2gw+2yoNUp9=qoBZuF=+G;g6<9MfKnV|hn@(C6$#W9C2w z97`#4FggVy`|xDXQVO(h3}$B_*|zGU9Cn8jBr){ytJyNUoMUm?$a62<$mdMeZHKtmt^3_9POPfQsf?BoXA9g0dV5P%0p&R zzy=H{j!F(i$x|8n`bg-D0GV|fx6KBc64J>0{jxA=VMboIKcakeM>%S17h0OYGhtLO zu2$EeWTqf8!046l8B;ve4^DEuLA;3ubVgaBYQt0O1}=dXC;2QN@6%gDAH?U~8Pv53 zqmhhoRwLO|{RLrD ziuXdVwP&i?dv!6eo1=VqQK=o=*<)E{!E4jH-H%7||8V!#e^GUN`?vxE(x6C5seqCq z-5t{1jYxMhgmi}j(k0T}4bn&>-Q6%l4=@aTw)cI`xqY68^Zf(9zwH;BS$oC1*IL&# zZGX4!7E2>9^+?QnEuy}(`Om-UU+J74*sEN{R`VBpajfr+!k9|4)Vl)6U2^Bd^dU3mbFK`Ky+7#*s8y***pEWwv zq~>jlR`i?_k_FRF?EdipKW^@|{|yDC zRqLjEp}#rSrhe`*(ehnD^olP5&$9q zo@i1ye!$Wd)LC_rc?tjK&GPhhOz-N_ulY{y^yoy4t*A?~kwoT}o7*X$V9qU$yV~Qe zE1MFMk%~?UxsYW!j@JNohf0|oo8_}5EZ4ktt!0;fX12U 
zWuy^kbDZ0oBQIdZ#9MU`%+>ldiRO+kcFJa^kRbcb__op^aeTop$+_&Vh4J!jC#wE=UyU}-Wlc(V{qQdHrY958FjW!1kL{?fn zvR>si+LAM|oVL!5w`9+komD@37_{n(=)GfdfNoCpeIKPN!23cGMT)A6q^MzBJ#LaH zYsptcoS3Pu(8g`X`S6C3N-j=;>L=Cp<@CgCHNT|OFIQ?jci7qWjEm&=Dt@C7lcw6C zf_$9ygF#KM)aB^L>TFMU|3Cp}o%~)oh0gP0Z~-_2X^8}OLH?~RZ-_a$$2oVHwD}qT z?wlT2n_(EzwqGYnsr}f3gxxyZTKGUnKj`kJr6iG4Uq>XR_8^XF0bnTSnDga)t00r< zz|vPVHbbc<^?0?!E$iWEskA#qqK+J#bt37<#)~xs8aRLFf2INEbq_W19#~Sz2{|VnHL&eo7X2Pz9&R^ z)8RL|+_ks}u9*~t7kmV8ESV;ic~}cZFwVET9b5RDYC?b=I8`vIKnU?V-n%e z#*a&$*$*cR6@Si{*5Q2}(ULEtbM1F`kaNN^?s84KUGxK?l52dzIc0gg+GPRuCdh8C zGLRO3+wRM&_q)p$iJL*4B6h`vGU zv|XXFky0$nWERGHf#MMrRrc@uh|BmHUK3-h*w4c>$EWwl0WxiKNUKO8cSD!!E>Fqw z_ev9~#9tM_uA|5vhgp3pv-Wc6J8~Cq3Hf(Gsx`@Vv~^WV>vGXD=$HIrWA4{g2BI$A zNW;Iq&fA9qFUxhhr2T>k^(?|e6X3kb!LzAX(wq7gCBXp+&Tb_?-jYy!O!lK8!bsN? zz_?WPaO*?jxb2jFbNelVAQGA{=6VDTIOj66rv_)@oBr>l8u_)S=rkynwrmQRFhm z!blM0RFvlDGerFnxPGrO$R0HW<|!5`@S>~o;q(EOGbX`^<5^v_ritKx-3J;mHA ze{@O`)gHf5txS|cbACLgqAPGo*UYWk6$lu z6Aji1`b)?ASGfn6nrA5QWDG3d<+slke0?O%i=3Sh)*2w->Dp%hF0>e`hz2+Wwh4;VQKIDN|5 zEIGz&2XwJ!f=R$Ji-R1QiJ+Iy+My=;(aeG^I z8htnGLUdnVsr->IC$=r$;%!BUx!cG65s)WAu?{JHB0*n<#8eR&Unj*3g`9_3(srN` zJYZUqHyz)!$;(A^ZJ)rPs#B?Cex#swgGcT3^fTWTS5|3On|E zPV-T<*EPlFlJE4i%s9g7e6SrJ@vc)MbeX0@dKv!|kkY+gXv?);5A!y@UPI|^g~(*P z^ot9H^`d_PCngHAzWkKH!`y|#gts$r6G&&~do(AgKb*?N6pYb7IOlUo ziP63iJ|d6_Tx1<1yp%HlX^yERQ4nBIZ7}h#-g8) zTk7jlcf!(?s2BBZ#Lv*8%+QyK|8b z4K~R>!`c*Re!jZ?v!U~dzga^_GN3Mb5{AoUXn*@(JLc*pMy$F)NXffN zY>c9g0oxq3oJq!nujx9cr796H0;Y}(R3;TRE4)t4wp*JJ-kydvca72mI&oD(Ml8m1 z*JYcJb9EnPiIdq}D^fpXdQ74zrE;KYslEL`3dL*waeez%1?-h4TZI$6= zQVJCMHnS4a)4Fq_KAz`)g+z6!G~1%VV|yhW8y^#*+v2tnmqd>|T|OFY47*n3jXXC! 
z)E+m{PiB8gHd;~KnE9UKG>+)KXdE04=oOmtQdHrS6a_8`?Y&orUVZHn@98tt;rKJCGu0`H$ME(m* z=V~Nnn`bYB$0z9~&G&qoc;YMBv7!6Yn4Aib^@kUlevSS&w$7&~2$z-?=)gs1c2pKs zKkv)+vWdomRbjRwRn}1RKL1N|LNuiLH{LjF!TCIDau}fsM#TNAy;P$3o5v2TYxu5( z=xnxu4pabkO%kptCYenCUTD_0Jru^T=liPaO`@K0mOYKmo1KghVk6=UO5l!Tly&6# zOzWvYHtNU004SblupVC{@a~;~L+AyhmbJNnJ)F|nB`7NZ9Aci2$Ta-6S;f@~I9f@G znJ&tSOgOx1!_<=fK}_gwHIN*ciFx172$hY{CCKt*u;B**=Ek|0{<3#bC}nPz+;ixB zNwSgZ+$LT7G?yr*^I^xlB!`=&V8rEdQ#v)JF}$bb9SN`NTjgB5@fg&sYU#XM2Z3gW z=nT%6)5pt=nA#iM&6NCZZ6yHO2=0>H;T&rMFAfN!)Bi<3Z9MkV=C?>V!eurj?2pKJ znk!WX-(vXl(sog6OhcEML(L_N6kEPM#i=MQeO%mR@mqYS7Bv@&0Js98F2c@oOTsS2 zMI2)(qP2iswLCTaYr#}#28!2{Z;81H_NJuimH;g9=8;mV5nFnj+k2`3OCN(arp;3# zqb#7W5BF7Rkv%Ba=6cDoJ*`_kraU}!-L;0Np9>d`EJBRrGq@kNz11f)bmq-JCB0N_|#strZ7K#+FE6BiTEjP6{Jis9)q%Mj?-iiBAYv}hWipX z=<`FI(n(v)@2i?h%AN#42?dsq+kEz>i*UEL`I@8XUd)9O;YK>7{HNX{*o8Y9%(~Np z+V_kT^b+ZT$x*eR>$75q(YJ4F zO1W^@M!Pvi_Z7jMRXG2~v?q3U>jkn$?36eR5^0~$vQn@rSf`OIpbf0YtJmv$zhV?( z(H-_I<_ahwn8l5&;~XvN`kP{6AgBrBK3KNk4D$OXUvAv%AMB-d+E9hLs-PaMfIi}7dp7icf@7qRiK8@FkoeX3AM$3lvIgIaC9{uDt?5&_M%&Elr zc8u_mO^3kEFi3Ya0L$5GSkLIl=n>iFOd5?Pk6}#-ZyyCKc(!TT3|Wt@KSdteD)h;k z5KLf6&!k#!wTtJ#_R3ysvxf0-IFBuwW}Z{Rjk#7!u+{*=9g@=T6g~Din`ShLA{wCY zMt|3g)@d=Ll7(=k!XYC)sqQwl8?-LGz@kgv<%~JN>QC`4R$#Xcv3#@+K6ZfZRZINi|L_Ky0hM8mQ+HF@?x z({xphUM!i9M!hoduN_^C{YMz>P|D)cBc1B1IG6Fx953J&8K#^@u$li*%aA zX2Hr{g(i#?cOHj`vho{bKhQP%B%&jRdUGgGl;cbuz*ej;tlHP=MiC|}f9R?{v}n0B z=@E+@nm%>o0bJ57|eW|Whv<@zw~ z`+Ptvy7=bN0u*o0Sq6Lm4ORLB9YMVm3ENR+O;B=j5np*&f+L*9FPw*EGb?}`eku69 z!GmFDpOH1U)#Iwg{j_#WY|v?MQ0Y6Wr(qjIQ0HYwDKchkx3nMst9RcUUlx`Tg5wyi zi92M1SE5b>-_sslX}gs;HsRxB5 zA=|2=a#*Y(qgGuH7GLAcjk>KQf)`sLUo)S5etuSad;&Kai8k9x75nn2^ob6WcJ1L; z@({}q+k9$_(Sac7^+AKQ*>UA0*3cT)@9Ktj8A0Pxz+KXK$$bU2&MGI*WUhF{FqE9y2Gi4%K!!a4w=Z1bB0dZOp}hc9ig_>VBUx==y4JQUrp&hD_8 zU(V$jjx%&XY|0w25crzNhm%V_5Aew@M)}jwRa`L@8*>-2hNJ?5%?mkTZ#8=PTLZ?+0)vs-B!>0 zlimB8tGu;amNgX|#lyz`*;VNO$`&j1P=N8}uYF;1@tiwVEoU4S)y6fV>=PJ3tV!=S zR!+k1XZWK0K#MnbfvajD;b?wMqNXgMXf*1PS7!r{>+QEh_cPY`$6<#b8C+ky#(GAb 
zaum3lqc)q>*=uE5(&ha7yy8!60Tn<23NtkP@c9XmJv80SFRm_V8bJFOx=ITEqXj`C z0G37H=4ptr2nHiYHm}=GzPT{gB)^^m7!AUpvGywTdxEIWA=6r&Z8$;ZhpnsuoV$*W zl@^8X3j7^M8_pzksEX`np4Da;RatvqStLF!1Y`yz_^9*YopbYQM!#m?p);Zv{@#J| zB++WF*}q3VneovEaG;E+^8+6jqwT-}HXc9a1~zaIE4{zqphPM-W2 zNK6*OQtq3d1(-Z`_aVqYaUy)DT?F<6?y|Wc+c{8lJ=SFCxDpZXK{}v*Q)C-7&yC<= zyIe7y#n=4dhDNW@kgL*^6>VH63y1yZQdUT^~})Dq8lkiv7W#C!ViHX!a2?A$8_AF&IyTG7kOFP zAIK@pcE2)OTfE#F8q#hTF?HNT<;^?o#!+nAClg*s@Io02_<5J;wv9mxgyPa> ziJ=8=PXWQe=2|E8cdCF*<#cP3j}SopOTIsvf*N1`w3Wxxr*ov zvUbD@uy#ToTBaI2n-I?Kf&E(%0qikZ{W7MU8mZ*GPS;PAWOn>a=76+*n7b_9feck(b(vNS$dAsgch zMU&OVX^`;Kz#pMM#>Tr}AkpS40x|15= z!r1r6DIn}0{Can`+7kY?_)G7`Fb>0+NT{iWn+kpkkq@PdIxb6k#}Gmd@Qr zez`#|{$tL}PYLxxmb5+#Uzy+^5$d!$q)`S4?sT4>NTTYA^^>4L5kEF}dl}#>H6Laa zZSu90@-hT_yMyE^kuaV0o4l)W?JW#7xj{)`RZ|S$>A3Ftp_z-Gr6`SI2GEHLZFd_)K)4Th{#0 zSDjK>`uNYrwII&J5IP*O+@VAozES}A(-ieF_jl-$Ipwd;k1jluSl172IcK&?nl>a3 zJM~j-A7j6e=M_;QO{YH%S^ezBMap1gHLK~0o2dggA%-0%?13{{<(JWz9|?S zM_P-hv@${!p6oZ7%$*(pwzGb1f75kq^TqvG*%jW5cRKn&PZ;C^`+=j-Y@?#`Y4w$D zt7672E)zYD@ZfW(ONeYy4E5;@-1AFk=galgMhgw#;HejIg$z zEi+ujBnTqRMjH_1sEsdd%gjl4=tpzb~MbE1YP$vU^|nbqhnr zlW-(?&lr6I<57Z*(|GsJo>+cX3kB^Sy^bql*!EM+N7tDdZAwOJ8k@5uS|o1o>2%(f zg-pA~V1*fuwOnJw3`#Rq4ffU`bUGpgz2}L0 zK>4KOSO6%1%MSoj)F1CI7Pg1O(nes!bI?@Q1e{K<4<8*jjcPj7I0dl8=%Rx@%ZkMQ}1HzTCf|*nxvUai8#6#Am#7rt{T+J!H=zLI-nvZx7F@ zq_C%0XCbK4PIfhS-(V=YyZgs}dTL!ru#L`(ce$UN4tt~UFQH<5{Be3H-bWoqz3*6h zqzN#5Dh{r1c}`4Jsgd9)kQd+SY_YxcjgG&~@1K^&3D`2e)MOR|)mWs$%ZUif0_1vT!R7jZBQ-;aBOiV`clr)n0ZC6BVx1yV{ba@6=^& zw9wrQ@@ke+V*o^$wZRd26x$0#s25TA97$p#kMIR4(-*f@%Z2=?{;hJ+h^$sG z{+E3D7xt7^jmGi*ydw&^Mh!}}#|RbJ*3RIWAlmTOH4THtL^WYI>+{3$A(M*tYAYQ+ z>6P|b7Qyr(CBggn+j>ddJ~-vc9OjawOO`nR2>Zdrt+lcY_uJD8Y{Yss%C#Lb`n^^_ z_Q+e7TWiKR_PrvFyT}pjc>526@`}7l8Tq{aE_-10t`Bjw2apaWv%dABNp(6+ckGz} z1gq*==Y8+#-TqRBQfo6;ev*1-bG2Ozs=k1%F*5k%bvkf?Omb%ZF%+P}o9Pz+OW_&l8|-V@v=*8RG?H$|L203k;o& zz(w|doknJUXu8bx2foL5O@V7@(p&PBjt>d37em`ZL(7eP0N}Rl z9&LuLZStPg6ReJLl(*|ca23deW^tnt1zbc}FdC4Qy9T5fZ}Gw$q<)~46m!7rv2e*z 
zbpe6`=lWEMuAKG?qmL6$jR>=g^mtci)kBEX%XDcl!ewZXbZ(sI@H^k~#dX4?E%BI= zN>hEDt=tNHX;SD+R%Syu)u(2Ag!gHpFb`J~*TQGC41xOKW3rVmqB5m1)Xuc)-3;{e z=W&rVUL9Un#?Gd$V?URe#_C7lJoy||Er3Bx{zH+InAHU&*97VLV&?@Ji*_@va zr-9CPj@L5{dkyz}56&J`lLGaPJ5vb#$^q>#Kd&NQ2#)V-QA#0UNs9VMbc_!Xx@*MH ztlv~iqFcB9T2olas*hd6B=*y`qd}4uUf^5hxvPSurvYgDmZ0g3pOB9qm)s?W9l_u< z*m4qA_f*jyUlIVDI!@lq?V>Z;6!z;?I6O@`ehENqBeRj7+KvkqRy8l<=D!QON-I3v zRnC<|7YLrOu_p1PR)cJpdsEk-Nwx7bx@&EznlVIA5OeT!??R7LcLtDMaPD*;UmqnE57VoVpgnqw&fJE`&A*x_z#bb*k z5JCOjfBy2v%fH%DLqN6AJXSyj{`r5s`rU-D4DdRMx9`t3|M)iwx`#+L#cr&;Opq|I>OUo}j`&Sg`PaYy9`? z4I!+5*5QA&p!t|}L`%x>g)acS+n8e8z(8-g*jUqakxZvEo{K4x;(7U5(PkpQx5JX; ztpS7Ug{JANcR%i3AmZQi{{npT{b%f=<47CMWV1*2CZ@OoY_I5etVTA2pRZ9I@BSg0 zAQAUe(t@e~%Lpw(fXT=7%_X^ah`vEWpBOS~oXFcQ*Q7bXEzrdc1K19+<(ZR6kk!uh z890qOd2=A4=`PZ{he5yfMKu(%JYB+KXAl(q&bH;eHsxkKZnU+ZmageA*p?vounj=s zYEakr8iI4$f73RJS6vVn39k1=M5*5zYJm5I5Z*VF8`2NNsPvVnPPEGne0gpIVzV9l zZ$9$4?rq#U&db_#Vxk0pn$CmpoPaaT37C- zO^1!rL76UN-3g5YcV4REqysu`?E;IGDC8eBjqV>lFKYQF3$|pAgJz?Xd0}EM|EsV_ z-W>(#aMOARV+yPJ8WyL)T|R7EpKu+(rYG5qzud`=e#tC*g$o0hyv9)1bCxYkqz)Fi z-KB?S%tPoMw|wsZP%|n;{$^kStDNcG)j~wPxoNt6Bk1H<5j<=#IXh}Fkk@f@b-Jhr zf^6pkC~jNOIY;Q*+B-dBo~bW}HwTXR@+6Zk!>jGI?175eWX{|AcJ_%=J}9JapFkT~ zu!a0!i0e!28K%k%=L;^I!NfuKY~lYRF7ZG|RE3rJf&lK4h9+<;hsBMp4?G+;^q!`^ z-Jb(htQ-bu8aUT-jLg5O=*_?whhLvNlOXdy6OZ;D2Kq8%94&Fx0-fuuv6!SCTy}=? 
zeTEb8Kz0}#E)nW}Qg<$t8(gr(_ZiOf@N7)ye(6t7u5;X!~4xi+5>E=zTaPkJd!^<>!ePxJ~XAv zp^?9RQ@JaI=yN-u-`QLn(0aV3ZL?oe>%@d)TBtdSh{-2xNd5US1i}jEw#{J8I^03x(>tVrD zM&R<$fZ3M*w*JN$1p+!igyO(+*qpLoH`Qo!=8xuwP$tA6(##FU>he!tyOJzlWFfm)$X*LgQ*$}_Nz-VW?!`FR3cW?K*|)BBLvSx z1Es>gcjXm?GJlQ;<@c${(`plNj|}+zI+>K-4u7N9=u{A^>)z6cEZ=8^Dm2Y%(m%0c zRmEj<6}DLlssopL!CWR?%(=6z9m;d6HOD`e0-VRC0JLVQx$@bB1)^Lh5LA`mO2P(@ z58sxd8eTmUzs$JS2*!B9-T6iLlU^z-=R)<9(I0M zzE>#6gm9lyeG;5REf+1YxXH)7cQQu%0JF?iNiw{7e+;D(39L;~PxRd1F@P^IB{m2ioqor?ee5w70>mRTxbLZ&z&JnDaU<~-to4|B08 zN5twMA7?c~C|0l>txE{MzmeB$K*XZ-B!};Qe}^plK&V7$S&#Al*eP=YFfvlIaTkAd z|BpvvR-kcYFHi&L{jDnjo`a4}aqj-EnT-OC6Q3P!uKm&EACFSM`!*qJ4gh!jJG}lQ ztjiPv8vp+sxbIeuO*wNN^mIg^34oP)RF-4&^OuQDci1O!gO`6ru!2LlqmZ@lEAvmr zf$RP*Cg+>R-X`X7sp$B4Sbo$MogS{|gMd~wvaZEOXKCVR?EhnwnPfoJN!Ko`U9o1$ z;ksnR;d*N{{T=CW4C#K!S_C3swQf+A*|7tM*Vc_4RdQGLSB8{^uMEje&lr*$wuVv~ zb7eoz6ow@3{T?vd3xlG*wRlgE(wvp|b9tIglkhgHz+~I#=-~h95EOhK1F^#D(oj;< z%urI@Yn}M1YE9)jizczrB`)uf zvi!-bdr}f0?O3xF7it$&Ay#VZV{Z8~hMEBon!QEaAL+}-KeY0XM`9G9@gDn(2C4g7$HN7t6jw6Y_y2zb_uYzIfA`P0iAj7L zPPZB!of6aow379?L&=TLhEkeT)ik?n%&Vk~MOFcVqupHjj8yo0RUPZOw-^1{{W2qBSx|Ad@ZU4k!asvj*AdUt@v_wH=f8sU*+?I zTF>qsR^i2zUh^XFF=Aq}w|c?&bJZaZkPsyBD*LhaL3b&DGg%pS}3QWW+@YzS%CzA;D)* z<$0J!!{Ty4XDC2dwlJsbaNM?#DYV}J1cf}CkMA=b4-kF=YVxIl)onlE@v``Iedon| zL%TmppnF#}D*EGp2Jw%Xh>^@R|AueuPm?K<#DGCqUP~2vzxF%e7zAoz2|8C?`eXAu zFtu{BKc6e}c2=?baTaz&Uv1wmg24~paICxDOVe%h_F5zZ9jx7SCaA6+)1cC0^X6yfY~TUGJJM0F=`U^2)jOQKa1U>6T}9JcpbM z=aA(QSE_qMpub$cha0Fj682*ewCumyClN6-pXM@`1O^E0@|~z$7rioI^ZFw$B(}V) zv>$e;x(d09#jtoXJR1D6XxLh{pfj5wlYZpMPTk{2fAwyaL?%2xO0=z4H;)^M(xAd0Dhe ziB=6-*~hi6_gOI)lj`e>evi=8)DmaehefdY-kx1;#EvXf^rk({#pW~+E$_FAmU_j1 zz4HW`$1)sxaMyYx%waVnuBeC2&Iv!OT8tL*;aGNP+6T-+{Sd1I#k~nLF~vhNDTNW+ zz2Ai#WjQWNym2S4#U@ur(~R@po&MJAz5Um^l52%UWzh@r0*9_dM#*R%D0K6~e9+>) z?Tov|*E*C0M3R4~#D5-rktG2`kiI;D`|?7Dx5?_qry8ALtfC-w=%MQ}zGr+a&D~S$ z-RHb+3TtTWY>fbJcB2RuqxO03O^R)6cE~@j>_02{0Ew6$a2+1rmY@rI9vJ=ZyQ$X# z1lHGQMDM#bt=D^fp2XX}vsxD&0}iEOky^cnZYFe>1cd`*{6~fUX?BkjXp7t1-vM)E 
zSXTPPa;f<8C#qCTMtqCEIn?7Y+qN)kSywBB5r~+Qy`Fwcdr2ZMB z4q;@bSCfk(_nWa{I{l4F>67mJgCQQ^!`I#@Mdmjyx5>$Ox54C97bpJeO90eCaqGQh zR^MwmU@lvi+bxLiEQ*|?v~IUj9s?g@HtcZyJX=OU^!`87{>M;#k$(pP4;KfO^!mB? zF`NLy#BFr~lzp6lDz>zhHvs7XZiSIY^s~=?$bK!0aNh`7MG?9)w3czq?PPI%zcOgo zw_GeN{?=fdcE3O{`L^y3W7C=EbhUXdGH870&K`(wOf+o$F{fNxK&C#QgeD&TWyvBV ze)Q_gS$$9+{4)h&rA8o~XEm2e*au>n$wS5iqiBI!4Pe>yn_QPjx0gKjNue<15jzyp z`i$|~VSufzDKDM)kPLKFP(C(^I4^JuN$ueNL2_TmCq#gyIX}vsoQSEBI?TAcDdM@k zR9@hC(uyg@_So_`vm`4)-#cr6p+UJhMcM|hzzoRPw=(D zhv_Gxw*M=N3i^!ETI&=A`gst94{xz<&cV_3Ts5I$lI1g&_&tW3_=PXE07_OP6TLP8 zKyn_^sIJxO0Tf($Bb;|y4*K^eYb>Yx+Su^~9+RNYJzDC+{{EggWssUyJ&`Knj{L_H?MZxdc!L&V*6dkQxeQ|+(Z ztdhDURyVOeJXmhCU2ilieRkh8cl|E0A_vglo4h_e{!fweAAkbN8p1 zzvrUxOQM}#<)_d8ddK5<}YfrPaG5A*ToBhJfDna>b~?;R8{ zyR$&gOtM9Y{>!+G0M~6K*!M?9{igy_kN`cia(ewo0-OKN7f_8^F92xT=I0kGRhw|+ zJ*J0CDCf{6W{1CKCtoJh-Q>x$f6vt33txOXwfdscMp}C_z%e)gPlejXk!%Y+B(?`y z)n=b}f-Ai7+^%=?776q{SEuwq)^!7nO%qMw^+0`f@%JJF3%rnInoiWytru|T_|{=V zavG4d=9J{OhJK*4F^SGwdG6is&HJl~?^n7B^qt5(k9@Tqhu9W@5HeOXkxD%t+Rjk7 zumnao{V1V;DOAM4?h$%zh^U z0#4tnw->SR!5eh?cHsmJ1Fzk@7O>^YNXXqs(p>u9x(-I|e*iWG4>6!yFoA&DZfVsz zdvuG~kHgV!2mHnAU9>}FTrv4BfLu>TC1U+0kC_#?W&mWQ+t0Zr72Pa=ymfUCr-tdX zu)bX$uvbvJqR?jBd>jP3g+UhWdr8%GMa-NIhm%-(TkdXQtIoDHlWJ_V`ID|QFmO{D z=xQ@z@zX=Z!dJPIu2_*?H_h0@FrH8QwlYY!uq`Clo*E_-os&-nCUgBI$xZg;vS9}V z%jWb@D0R%}y1i_)f@N#~27&OKw$1e3Y1lR%-`dYGqBURVK4cV)c#2K$Sm7#>dkI-qBJ zmzYLwe<`_wE9y>})nvn7@l*A=Yzb^=u)s~buFI?m;EW!DYo_(k1M7g~iK@cg8W#=( zmcq-E>iUc4yW_pT89c|-i-njYw%h9h zXtq*b`YOPf{CIwJs^59)=Yo{64e|VTMnc-pZ;ntWd4?T+A3#l4`z5TMCpD(DZTa$` z>b!#)0?R;J1zK(JnzOK)F2(;+Sk;b960lafgvh-uU+ebzdz+iCHBUA4TfhepQy7T3 zy+^;V zfi+f8nI>|SO;O|Zw*q2?)s|mjYWDk6CE9L``T!AdxDfuNX1$%^rQR?U!l&h~+aA2ounGwh@<(p)>{X9%yoE#ydlT$IgS>+aZQ-S(BOyHt&0P9b zYtRhOb;_ghgGV+~gMq7$T=lH+U3)?(i3pYfpeg7;39Lz-KW!r&bVvgD!L^l$nq3>S z<&BX8d|zQ`ch@X9lF8cvZ+!~fS~%~JR$6Lu8n$dpM2bWd>S61}*X?!NNzY+D{Gv_^ zre>4I#@!fcy>$a7=XCbKPXrt#DDUrxiUFd$;U>}`lY%Tz5wJ{tKhb~+j0$~(b{>Nl+{9}6^;cve{F!nFi1CPsRh&o 
zhb}K)y-2-rdfO_TCzrMWhz61ODkkRUojF@7*6V~%Yr#N_7^Khc>~>Rczy1POg8zEW zJh-2Ln|l9cK{}R<`ltvo7O;gOg>4@5;I!{$bb#=;n(WIVj%5t58daB?$$sh~Zdd8; zUstOD+YojY9^IPX+!J|TTr|7ZwqK1*O^{a5>xiy_xVsUb+x1tHmqLC!<%XS}%6hwX zOCEGq((631gDMtLe9IM|JDC1!(Os|B-Q1L9u7%_Rk!>oBD!2ia~$Cl`8-{> zEk)o6(R1I7-?8Fr^Gd5rI6GZ|b&Y$JExImy*_>?rJpOjH&9s{g%qKIAkTg_5I2lO+ z{g(W}tC&4LNCqf3qc<H|*)S*-h^}T&y zcRs0ZdLy&vk?wQrUbpNGGa(gM-J^!zmJd6?8RP7(k2o`lC9JxNi2#hZYXtK|#5b^K}>D-_8cFq!)y?@0czT zM;7{WMFA?H#dsR4^{K;I%Ffb3==gTSOC~SIOXJ}KDH8xu(y0z^1DZCOtaMaJD7&16 z?tw*x>#Ls$}*>vYfF*8=yu{#3hLbu2v92=6~sIjGmuVjBH4*7*fW_m=cMx^ z=l$1?1`xBzB3oDrXR5xZ4%wA1Ag>PF1|p4!F4hv6UkUo!V=ooA0c&|t^x-?X$4-`l zOswE5=3?jl%J=21&C$ec{Xne%o9OoIXUR-_MWYvnVrvnT3@$Q-+vxsUY`M%tt|ea@ z*TU$mPz{Uy#~`gR@J@JBsmQc#kxzA4@AgYZkK`=15{;M{YT&3heDG-xp86{TS~HB3 zacT3Cj%$d|-To`VZvXnwrbk|i5Ma+zLw+q$Z44ieecAJLRCg^jAkHnu>*lhO>4Db! zSIB61FET*j21%Lmn()ZS40z-=E$35ij)r>HQZY)SppjBa*6%m!@wXG-yC_1?-3gB?i2cE?ItS81xM;c_#5tkn9wR$^d6T|q4W;kt zB&l-WPfUz__3k;?78HoZ^OliOP9ZL;FVLNgm((*Gt$E)(QM11-XFUfJgUou)Pt>2W z&=|_JYwT@nu8%`f>wj1pmq;xd`9L(5ud@TyazgXEKrm9;6z^U_+f$k#L1W6%D&>?N zrJC!5JFpLpj^;C>fhWKosU4rcs=h}~2|G-d!lfJFeMr!7aj+AguiQ++mPlf@t>8KM zW{TI%I+=pQd$dw6o$l!Pj8S!}21{+g`dz(g;#B7o{;O828cj3QTN2oqsC1CEg=$jZ z%5*GaFgLRe~YI(4Wj3k{8I zrQAHpAA!OpemnMIrOAMQD97dAL8OtvZ-*WBDw8Sm8etm84+*nmWo+t=fF%7m!F$~z zHQOugs%m1ZcJY$LyQ`=VLSME2?R;2Fd9^wmsA;l z#&117@vJsGikmi3QXZr2%yMu}il)`OJYK7Ecc4aXJF%XEaxXW<22CB0?{dW-d^Rp0 zElOX4R~bwy`JLx%R3sbU0FcB=Gv2@eAim(S5;=c$)UA#g7VI?(?dD)sj(L5$uYbpl z7rX(Q)^!bhk;=zBCd%ofQTOsE>}Mz>6(5bJgl*VG;uR(>ji_ zGXYCX*H_YLc)x0rrJ?P9-Y7e!O2u8P-vNU3I=Uhq2sZPVqhiZ33+Yg!E99Ctt8#SQ zvOlIrQq+=nT_b7{0BmqQt`yLUFG{M~_8T*{5a*2n{Es$F~dNr?%2PC2i5PB5fkWv;n@QA@~sy&^NvNgHka3ni#>34 z?7(7A4M|RA?O<&3l`VRWH8QtzOIOFMFy{D3ZwN}=ytqjQmzK55hRKK+!oo)pk4EK- z=Lg6ovtunBhRIC_ZIC8cfz+MWJHuSqq$*P*NPhXv^NuKzi-`1Z1vIf(XlfO2b_^e9 zplH8?AI+HxGZ-YLWfS1qY_Xci>or$s4L&2htg= zs)pFJW{RPe-t6!TC#HjKF6&46*Kwo!;pThtYf;|R=dDvNx+T|@qu<2B6&|qJPb?9r zrOOvDKcde4segF4M$m>@BxT2vMX#+r=U0+hgN!}L6Umo_xDXiWIIE_eq>z($aa 
zG)nD$4DZ)3gPHrBHF9$HYqUsQ=a&2zk7Pj-lTNpZfn_u!_WDswDeJ{^Sv5^-^(Bij z{AnGBx!kaWLVeryxoX`vPCf!S!``WiwVMu?nPgJ|psUz0d~;i5)7tJ%X>#?rwrlh5 ziyP8@^@eUZ{6@<6tKEEt_o^JvhbOKleUS^~ewGy_rTZmg?WE5`f}Sm5568XyfLDH{ znOo!H2<#%B=ofgZ&iyh7B>wwg#-4z0@}*sADE}95fY=5NDI*^eYupRf6Ko`clSoQAB$(otysqe9k1t==TqX^8Kdy> zFmPMRr&yu2u*XKn&`-5@h3RTuS3;0SKCthhm95dg%&`SSV4AIkaPu4chd+A^i++7s z3ZH80b`d~pajiAcMEM1rDGVnvHQyC_+5&>sGM-v2muSS!CO4GGI{pHO#u2wR8 zZpo`9`wZK|R#O+m)Z_K*#G_4$J_q{oWLnk>c ztr=^xP5hB2Yn=)yXfF}|wsjYNq$M4Dd0=p;lL4AI*lhzO#Dj5g7tm(h(*L=a^Vj5bPw z(V6JRAnzmYy~)k@{RiH)-XF4-wK6l$KIhqIpMCcJd_Jd%xl#Y)x6o5!AyPkUNdqTI z_P%ci<$B$ivF1z^Y8{Syh#Zr^*KN=WDOiw)x(-)|oSuCLRgP~kvekJ>CI)^`0y;lw zvK(KK3~u8f)AMCrD=$=?b6m(@NxAmxnv(Lp-)0 zpH0$Zgn=>*G{iHn6=iL(P}UDYeXAwOSoEm}*HX zQmd?QKy?;`u-=+fJ-rep5WX6_b1f56m?0b4q!=z)^VZjnH9L|-wfw_RSW}$U0WP%S z{VI@|yV4TMLpyv~oH4gf2(Mbr;jgO^6DoHs(ylq}y9aKqL~;&!w~>M{s>DmZczDKS zlTNN}Bj}TOcvy+MJ&Kg|Pr!J6E1$6!s$9&CPx?ug_v<3B1;J0=iLt!yeYM-E~4H#!vQ)3;yw{ zy-{%@bjYd6e(o`L9!du~*I7R9li>+$bHN0 ziPbI3m`XgT(%8h%o+QxP}*vzkiXVY2XLM|L>RKWLaSIHP)T`TiDD!^ z8}r8aeK&ZGlPvqZ3o)$DkgDn$(_=Sx8O%D3VemVB~zETW&;n7Qvo zu->W!BfN5WT2|mVYkga3eGg|M;)8{G!QVOSCjg7}gtXjD6)U*ahe98f0h3(&JJqO2 zc6T>Y#ti+d%3D4Z!kM5n@17((?E2cY*i>Lfbr_}wx)<}o(yE)m5cjcRvM2flm0i4Ojx5 z?|{R+6@%LLM{_l}PH_1aMHkBd_X-^y*97K-GT%7bFyc zzs~zpk|(Byb^=AYk2(roHv&IS9&T6O5Nz?woEJiTu=@D6=Qd;vSYfuxI|f2Y^jX=% zHWn!;c4eb}8@ZB!lc$|dU8!?0AMD^Bm#PPCWvD|Z2kCBwNldGJ%)KiMT^fT{j+dTK z?}aDpGk2Et`s%)lyU2yy>r*PKiqD$O*Qy+zl=3uTu%`x)r3`}lEYg{!%TFX%bnO$W z)>Wuzcg?2uR`-G@DfH{FTs}2!A}-DU#lX>uqiaZ%mQqmZ z@4x3K>_RDCrEWFkU4gQ{lPbLI*KJM^2~xju6=;Hf3h(8uh{!Kowmco#WlYV}-!$jP zmRE6(Tjid}1N!6CE0A=PQ_uAKI$6-}Y`@8M*!^%OdeO3WF2vo11|Q-}zPB7aiq<@{ z6y=`+fO_>!n||88K3g+{-^*xtc1ufyqB=MH1KX4P0WnF2Gdt-wku_h)S(CnH=@_8Q z9#F9jTf`Z|c5WEI_TY>_)ob}Ek+yTs*PCwvhg7M`5le(lUe+4(kW!GuIRoPysLSYT zHZQR1+>L0at z<~BLWY}`!D>L!0C=W4KXKx4e>hBfGv6-!C36s-0HJc&g4jKdS9?~Z!7 z8@F$#bbXXERab^7%}SZ-+EI+(pf2UNJ~a+*8tk4kA-1aSy1n$ChZi+p3$d2;R((R0 z`>X#E$dq*_5OLG_iFJS8%@9Yzi^_(Ad2g($SM`$GuvmJc(6+?|k-pcn4_#$u8huL( 
zO%*949dKWtH>02(v2O&8sy?YE$%5jGH&O$b8Yo@P0FB60FxhUp6mEwxAu!8IJ-%MS zn)z$)JLB%PA#~h{5=NpqUD+wVOh}e6^1v78YkPFZwwCk_NFoF7(o11guPMX4ldgCP zO>&48H6GT4DlsU1IZBtk*FMpy-{nhO_Y`7q#&c**z&6mU`x`w)NC{{(Buu9{r_6orFn=% zMswR_kBj>mb*73NIl4iAabwk(!O~F=)KBOPcN_YkR`$jf5QogBhjJ%Fp3zRPlR4Kx z-y6VZ&m^7N`M_tPF1@`%DPYVv+mZP+G!=X*aU~$z$7PxNvg|O2UJLd9U)C+70}C5t z@4mkI5EhX6p0DmCZGJDM&d)|-Dk&-KTYm5CdhQZ&;L}t2fLK6*fRkn(sII$G$?m4s z&O|G}7n<0MmUNo={JImvcmQxhUW$={XcJZB`nDy=XlUIy8aR9isMf;j*~Dji9hME#-hH}kjAwSJ zd!z2|Nqn2vXj@b-=5_XEp}JKoL^L_zg*@HcQp$pO@9dp7sLM+-?sG}*l1b?7)VjSP zqN4nig=~|r<)+D(K}~PGQdPfAWrjc`p*)}HaBW;>XN<0Ju^h#@zBqRmgkNkcyqe;s zykM@rfWcoocsEREu{F`(^AzPsZ+H~6GwbX}1LPDqi!Gj|`psbi@a^)w{={a#AE{DP z>vQ9(J+Q?>pFMN;-g`YS#H3!Es(&$ZK!>ixwKy%XymOGf<`Iw;)GT;;k~C8z0L%xF1`9W*3K zTl-Sk;5vEQ@R2JQc5NtvOt&EugY0obFt*KgC=SgKIp9`hI+^w;w=#GM?iO>Bbcewi zW$D_&K`sxurjL(F=wDek#egcz1a|kifA&Y^1gO6Ug?jAx>YLTT!sdDk?eeo>T}AvW zAL|^b&RL*RBSxl6U|zHCb@ySPb3H?zcx9eSqH_5}^WZKY2-bL#O8SQMzEOTgYdAAw z14q@a>9DJ1|0^2yeH%@KIf$a&mrw>(6PJ1hj+>XgmrLHrJ>52PEHNMT`*EP~MEPSp zcxaxUHRc6kb)1i@T~bPgB;!ToQck7pO9)BNApf9KJh6uD5IxnV#eC0YM{++As9>l!-i1aMKuR_Ga-s~fSCP<-O)jS#IDK*}ADV1kLWGQZL(3xFmJ$%qtcmDSo| zdXlOqdzIXvE0?PV7gx)KvR_z?c2~OQ_fF zyALh;sLXdkzd+Sgr9a`3WOMr@?_y78*qdd zD=2JXA8$Q?Lu+3tZa)8oqX)e7>Ko8mpn1(BEMIQe^Vlzwu3gXMbamPhj*vY zSy9j$Dbs{5^so{OYgZRvo);i$J?#<^3{g0$X}YkkU$Pb6Sp_VhAP#vyXkrb#+dH{G zEB>wdul0pS(?(x4tDyvsnMU`Q=PpfcYJD#PrMThJchdE2;hCpuE17CpBg z=B}+-PhL4)rRg_;-XcFgv!f99wabYD9f%(dA`LOoWvSE|E+Q2M@%YUo2aer)V9&BI zxTVb7Cw0p3wt%%%_UK0QGl!PMZr!E%q?2<*&P>c;pw;S>+i;!w{gT#-rNM(S*TxaS z(n@O+;WH5M;idFDKXS7n+c|Fx9uDK5g_TiWWlgc$TDqej^R>BVs;2`Z^4`vPI5@Kg zJjz9tn97TEzVKAnMChO;>2LVfL-weZTWvzhN(h1LvFJ)e*~#G_*AQacqs2?kU~F!= zt0wki`C@DLt@XBajFBXt(zn#W^W-hJePnGbV!=9)pvZ>Z*lROVSstba_3tq{23NA+ zPq}Pb4)*KzT~Tj8QTZun<73_qR^%7%6|cBBf`sV}2}GPWdvMvdCD)f~ey)nzzk&CtDn`c&@&vM|oBs4o zmzDBced9~mF1#3A9h|7J(Z708r%;orGuom_+M-oj+`CC@$QQ>@Xw@bbqJCLG)gh>& 
zROBL-F~%}@t_Vs0od-i+|M9-2mjm+a(41+55ISbFO~L2<0+qBQ%gQV(#v9#Os%fAHnOLJe0AJ<7M#v4OkF=4U!@{hTzEpsXY7+SN97`c;JT z&sOQ*xHUN>Z4QO=N|$O&MQC96-!LZEz~{25P*DR}_H|INPR}B78#dZ>=65&}rT=qu zpg^~qDnu5tZ3o3Xy~HuQe%a&V4bKUmui#)09gLf+UQN;o&2%}X zfdm%q?{SU$nRk(2&i(w<@m8mBE%nUi40AMTDTiUhip55Lp&N#qZ=m!|_PL*yGl}}! zv3?loAVqi3FJ)#^)BcW5>ZK;Ta4lzH4_f%{(=|i=Xg}x(K-&wGKKO|^l1sW8wWJ_r z!qS5c`fM*W0&4_r6ij3m{`GVKDtW=5Q#tmI+Gu3vAZmb`tTEY(aA^)I{j%*RRNJ2YcC%fEMJ=;FZN9|i9`&?clpUOQ% z_}GYWnqn%aurgQv6eHi+^jibVLhd7@8OMYBdn{Wa&kvuyv+c}Ahd85?R%*~aW@ST} z7v+(Ho_65RwsDSQ(ttSUcJ;8oijOl0P8wjqP`ER_ayi z-Z8MV(J2H4-?pjZ5TrrTLJ?(mIMNY4$_uBIM{09Gj(rONE&m7{RbP4!S1)VnJi_74 z_M}Xv>N`6A205cl`B3M%G4?Tt!r3GT9UL>;36?Oi+^)GSUep32Bm4{Nw!vuO zW7hpx^|GMuT^bpPUVUCeFvK%WJoX*t#ZnV$dcH##B7qNcrY(wZ-kjhQj8i4>1XVID%SD`7ubm;*K0^K|>TA&$f=7BN8B6>2(2H4g@8yLc-Zn z#2xbvV^YJWY!wzGIJUFXiC)F7tLEDarww}%wu4e*g|6=(!>>$edd~nC2y;({74NML z7dcp1Zfq#=?V7*B@;ms(>Lz8l472_CElvzvJmF_znUtt%$V$J6$gwm_G>)8265Dx; zUiYI10fXZ;GOWNbgBbX)5=4|&> z7>q`S`#Y4C!}aalT6}NRj_f;uptvngV2um8hdTbfPN{o?Yq*rE0|@&3eh6wkACLot zK%fJn_^egQ;$E6Yt*Q8RsCl_CC*kog#NyVQ=d%()-Hv^;25U=0fms8-03v1To-1!k zQT!~G!4R&ITp)~SXab$3Ti@!;Mrsk+a^Q(PD}P5^V1Sl>cj$Ytr{ZM<#6T^x=8%@b z2L=ht4C_h734X4vhaUN5tfRk%xfLP@YnYwm%)36Iqlx|`g7D)#g0)(f5ua}Hi`7oS z?AEpmOtknBtW&Pjo|BbaeP?Y%IW>KvvM=*h*roUC9f4w>h2x&;#N>!&k%p zd&w|iN~9Ja!)F{S=CqaO^qa4SM?9B(HBBc93_0MfHdifZp|rfkx7r~_hQ$fqc5qM^ zj;?(3YkH5|MwCGW)FGaM4(?kflp{ny{dqTnMrkoBD*YE-psk&e)4-4?UlMl3GU>G$ zgoOAM89KU#%r+`e`!Hh_V9r}=KgJ)A7JPQTSsHRD)-sZ+Z-1&}ABHd$f{@I{#YsY^ zd0cDDkBUsktTdaj=dZJ>N`=uTei}B-; z@a;f6B655uH(UgOm}+FJAD0;ENH^1xpi^*dd(<5>;{r0Pwc#@~LS=RM7>Ci;xS13r ze>*GluQf?UD$=IedzsfTr@LAMcF5(ZeBe6Ydleg{1kO4H@DT8;PL~)#-LVmI`X+(e zk^(8(mafYrNPB72I?OB|5$WK%2e2=>jgMD=zX7mb#*AWk{h}a;if&~%x_m0cx-upMIrxw!ixFtWbt1n-wlz4~K&bQZ8y#tJwDW;HHV7s$4bK0VM z-Pg}|bTR0p(r%BCx-z$R{?qj(FfV_}33=ovImj__caiag&%8}(48y*Yvf=WOupw%A zr?OnNoL|ga^^@3O;(XZg`Z}6pIaS)vqCM?8g(b7oF?dnMd955lO#xP6)uHsSfU=FW 
z!(k~mT)*1v)MR#-)`+Dnc(rY$VQz`|BQ~dy&|KI@kp#<08}GUpYVy6{!0L3J%=!m#!vmCiR@r2^Bx$>ji-e_r<+@=L@ctXGh-ZWWHt z*R3-k&J1MjD+)c-9x`tyAM)9X@YVdX5c0^fD&$dTGEd%pJGJa8NR-2ux?BLe6=FO7 zJ^>(@6pZcUMwEljXT}6f7Qqqd^`KTAq^Y&vS(_l*X4d1Z@bU?t-PThX90+3%t%MQ& z*rLp`mccADx_18UP8&(2u;HsRVX0s19GDh6!Pp+XGQysPufa;UX`S?ie(>D~A=Y6} zog!&JXg6YQbi6g1GIDPz2&+Q%i(PIve5i7%HUZUgcE;8Y@`*Y_!O>k{gC_!Z#dWH- zQYpiWoGorweD$NzLPZO}5>chg1|2VvIMR=biX!rPRiu;lkT9IZLWUlWD_Ni4Q1~jc zVarej0e%3b{%6+$puluF9jt`D*}D+8+Cd|nL6DgDH=u9}Y3dTvxm&i)kJ7Pd3AJg6 zpY6f$UwO^3+_ByvXrv|9?S@3u&Nk(V@;zG4))BVf=sa+VE*gn88{yZJ>L5b+ik5@} z42&BUULP~Om4GCoZ-ncKBF+(!(gRoH6-Sr_anN*CW@ajrytx0%9vSw>&ODS-*t$Y4 zAzMeb42YGLh?rlTRYapA17DtMqF5*J@7Ch2MX9RqRZ$EF#^ur_{1;7&;{}Jw4dP#M zWdRB#>*gS+ucD(MqJzGmzSL;sha&1uhtH%|Ce47-y=b@Ns5Ln)-Q zw4`EW=6RDyXJ0DbsCcmvy8eztr1kJ@3a-Dmc%?UcEY&I86Bmm%DoagVr+4ib4Z#9o z8N0KgWLWIRpK^hlz%dfNI9LwkwOJwk$C@JpQY3rSajz_zdKjZo?azf?5g@i8mbq!V zA_bOz{a(a{Qf2wIVWzFMBu{IL=EIH9(ryneCiXae6Qc!L%Lun+;ha4c~EZ7T!k3EX^ zgijRlqn?&IVfWJ#!8KGdU&d5KS-I`N?s7xp$g97Pe?aUpXZzYyA6h?QhSj4vYm>)J z)nIQqWssoWHRQa1UC%QMRpVyE>LA#qo-B(El1+OaH=@=`-Er%%iXS=Lhf>9?`z~i= zs@H*3KsnSI$dWSY3nG8MKCwol1sH6~Rq4$G4Zc(NMt_oj{Jj+9SgTrI*{peutZ%2S zQW%f)1;ph?!1l%7_HbW##GT@{_KWZup7de;@tHvMw>~|OM18g(f&}LCg;tLE>!0pb zJX8t$RN9N2YILeSC&m&MW5;XA^6nsOct3iI!_dIHVO-si8AvzOI`{78JjnIy;%WFAuwhhLXnf1=>9Nb%)ApW0 zk@8Xm&G?uSzwWdM+KT6@91t1?{pun{1l01(Er4F_-zg<<0LnoV)YueH1!x~$;In*V z;mS*4*R7ghg3>}Nx4EVDtKXV^i>>oDOB&tRWI0mVC~?c*^8~F_KFX2FEp!Mwq8nj? 
zk9v2;i6Wk*z{SywH*Ek|lF)b*%)rT7e zzsY@Yof^acVu7C|aev>DBNGiCF99(ubyl|YcVwSx^woj&EyjL~NClIJ$}_uNX(Xr0F1XoPUeT%dyTJzCiD(l(s>AO(3Ui`6lZy!as`Nth-H#HL5I>J8k@y zG6E+kG$OiHZ+zn&>=X*tR?;()9xa&OHM`>b&^;9Xi4>(dw?ZeeYhLP-Ywj95m}SbK z&c{kM7~!B^O2D~tj+Ymn%V>z4PWHyT16&ie`qu#S4C6xtauD2w<6;4t_VZ4lBwDDQ zrSdu+yN>3@D3)bbK(=AiiJ9sS|IQ1Wls|0l24OF^l4DZDJKCWHRiMl3SkS9m&~&HG zV#vs@;4pe|59J}50*j8cQ*3ID(gr^g1Lwe_HrXThhpxB|drN*n_s z2%53A1+Z~nx;E@ib@twK6&tmFwKRb=G=OVJ!?r3O!$4{-qo`4V@8geq_v``Y=cA>Z z$44BEaPbKzR@iRZ61K948MSu5&~`|06*FYw-k;rbc4>PB+lH)Mm4uJ#1Nm7W$>n%q|RyJ0WJ+|)cA$41JVB(S^Rb|NsD{&MhL6jNg zPb?rMTT5|7^Y1GezoLi_cN9LfEMut)EIvtyArSc53yLy46X2OGgA)^0GPE^%CJ9Ch zInwcwyih{Ne!IxngnWaLF`yENormnB{_sHgJbIGq3 z%^i+h+3!4gtE2q=>g!uQZjmyFPhXDTd~n6nL$8N%jbqf<<4fl1qHW==%GVy90%GpE z`@}vCjtz=U<*6b+VsP!ktf7x{e8a*^+wm3mF`Nk#(J3CZ@M)Tl%;reHXa_r)G7D5J zOmiCw0gUQ?V}Y4@rqacufN|lW9P5}O9Sc!zr&H-k|Mvn_+a6XYR4E5&GIk)@RPDF) z$DKq>x6d^L?K49sq+EK5qOnWe(6jLNgs$L^awl7E7KC5)xLwn!VEYb)fNlY~BkN?W zgMlykAXvZf@G$a7x`ex=<8JcWB#f@GJUVA4*cVLGMTiMAr*_P&PgM;}Hz&T)HUQaVgJZ7u1r?N&3rt@mn)pxTu|5V9ck-=NoTzr%>%hO#w<;H^8Of&t-eJq=ZwV(BipWTok^<5j!S@=%R1| zpavIdH<$va`?Y(&eE1=Sdtt8>?cXK7%3kXbHpl14$Cw>JW~ALy1W4!5nj?Npm+Q){OU?=@5 zc0C9#7%1!2{Met1`0Uap{NO%>Gf_m~Q81iEbWzj&@DwB8rI&%{r9a55_xjimH1E{D zIkRkK={`6zR|O~*-Y0zNk!H?>SIvL)tBz}v#jjQ!Fo;_NI$ov9LiN&00I{g;lG&(Z z8_P=arGpIb%|@zfpPdoDb)x!rwxH{evR)D#@%1TJvsQ`^p1Cgxh@YMsIl211`FzNt zH5`Z=^)$G)(|kSUdFX(Ovxk#PwErnQ32Mdk~;xPjSHs44g;j!k20IjMpz(8?Wh%$VK7dp zUK^#Ob%{(3RZ*snpD1+4o2(|<7E*QJUSdji&Z4(cMTk-A_T39V69P*1wQ*s19E-UA zj~2j)*HR&Kp=CdL+3I+05NLDVxZ2hBNJ+BlsEuuvW(@m6DAdUZKe@$xeuC{HHpjYR z!rOoMNTKb8#gPu)>y^efR-a*H{kFjqFyHJzPq_HFS<9t0I(6AAxE8!W5g)NP#&#Q| z%7;CqE=YXT8;b?K+cP|>=H;CK$;>7InM3(uXuSI8SzFdkMQA2G!Ptkm1*;A36p9mQ zZ-WU$3<-OqZl}1FVg9aMA4QM>dJ-{4rNl`Nc>63d3x|7@+f_S%vpw(QW9X{m{gJ|= zw!;bsy{fr{h1w6iHo6%s#GzN~YdWvY2Hj?K1mmweSTG<|303Hh3w%diYNKE-g8-awjqrW8N>~8-K{Z zHL?2o;i1Z{X+S+Lnm4DOxO&@}Lbog=rpIDV&cg*qfhn)hp|ZG9BAEB^h)6G&Ou8u# zjmb08SyqrigA6@ofovxin!gk&)DE1-^QPmD8nQo=x4%dz{pCjA@jwKhHmuzo@i9cz 
z;kkVq9EN?Xb2vebU3$6$k;)FUZ!YJX+%s0+NQ=F1AWR*!qDQbQuw+Utap*jIFN!e5bP?)QQsB?u{MOU z4WZlh7dX6Ds;0N?Q|folrSLQ9@B-}4k727pjIFmrjPagk`eW*zWcIRw-S0a#8dRO@{#3rA_%=o)T^O3&42U=I8uf@ zw1vaF#bSCn-z*a3o_QT?g*tyiQJ}1BD6&}CqAvLE9tLv&?M2|mT?KjQQL=;-pQM)c z_&|CyKS#&G%h+YjVzp4p-gQ(m573HRXsO|F*n5&}`NUZ--g(7F ziI)2Lq)E$zj1~$;!7iRYhCUBHNM`IeeFxUlb2Wq~+sh-D@+bC7%Qx$)6Y}@^_KT0U zozFmpeoQR4W7f)ITRsJ;CdkUCi-4@`$_`exKwJyPBFieKiNDw4;BtLqJdpEGeUIG= zhAJiZrO)s9C|i$tN-x`z!8@u0_3QE-6{4`YdxaoN$T)B-plLJ_Gt@HPD2hCql2wDH zuQ1^xV@oT4WA|~&)$~YEhyiFbDl~A5Jv~m(cID8dyPDadsS~S=v}uAGxIz5 z*)S(svS7R0!wYq#Z7Ci%0q9^g1R!zM8amXwj*r?RaorL-__WBqvQ+0DFJ$#gBFg9? z3)eiGI`1}>grv>kc&AE-v%>e?rBOgCJcD-_hyqCLv*4m6(&5hie zKSlEJ1v}Db^1*mEfuImJwAR_z2UIG-jK>h|qg^Sdle82azAvFM*3;3&(zBFk@u>Rn z;PU=0)5fl0Js+G8h)+HAK+;t8-Sot+St)Uucxg3^tY2a0gpXvczl>B)%MDhCp&qLY znX9Vww>qT#g;>3Z9Sg!e5VPhblZ7LFW(0<*ltDGuQJ`}0P*)u%BEJ$YJ_+1j+(a0? zlOV9Uis6&4RrwfqthR};kJhvRbOXML^|-B!S`Nd%jF=tC))*jB1WUwA9B{odCF}FY zR4N)+TC^QJ(f1)hB%dCfAL+m1-KF{DN=14w#k;*=&`Zw)@!8ChwAAf^N9qqyD}hLa zJE=N$v-wj9-GSZP36JS;cKMau><3aZgaN;2A3B8^qiblP8;?8lST<983jl$*twF|s?vNj8`oNFv8Y z{FeVc$E3>AJVfxd%*i#Rm~web7cK2T4{eIH9g#)_TJ;_XS#{>%eL!sFoBi%2_iB5b zEY;$!Nq0T*TjaOS%02Vw@g4xzq79Zs*=>91YssRyY6C#YH}NBYZPm%P+s;drOjGA6!G%Xs z9_8W=6VlX3)Vcm`Q-sdw3&ktD4rvaP4YB9UlUS{+K3iNMO`sI-l4f7o1SKz6b|=y& z7j3IOJCDVyBdPIIuED2C(t;?2Y6gxDTb(aUK7HI0ALglx6+n#*swPTHq`y0AF8k-v z@@$_p-dcwDT@um zOj#u3eq>A59;~GS?K^j=1NrQsRewN}IA!aVx0#BCuz=nD1Mi}AA)i(skcIuC$9~DD zw$(T|m9Sl#y5W^cOGwzXY=aE43$|ZHZQ5u8%>cTQW{VF_Q*rmAltHhh*=|3(5&7&N zWz@}qCqE1w%IcXVoPKD$X1+C>`MRtd&_&(oRewKnnz?{Ad4wylTLy94F`D`2(_&_j zZ(S|^F!&DWAR8G6V6sf&C!fYV(KKbk3sK3AQL;7|7i-xi6Y+kmk&8SffW*TK0>CT3 z$A{j0_LoeFHuioaRkz=ffaw{Qva|vd3d(|JK+7|8&syR)+eGWY8x-=@9V)?VOqd|Rw__z|d6jfW|nP7PXfp~BM zTUqgDAPY|O+X3JXTZ+UZn8^$fK)RAGV`R|W4ZJF_1Gk$V%9r!+#7lcdNKMb`j`yNTs0Jn8n?sHta@cbYIXOcZFt*7$CRTKVcDtxPf- zi}$di_+rs(=t#N1oG_p~0HU6{js)V1LLmk1b9J-zs%u0rW%F@CO zr+{;1OLSA<>GO{14?uJweqOZMKSb1=_s1RlrUCpZjb)_FcFyZteN*O3*lcwUn7&U7 
z?LoPW3*Kitj8vi#)+6KQ{3uI>vM|jIQ2+-NYDdQ1@mK7N>USc+DdWCv#^B2>qhRl* zQ*-w`X^&QP9)Ql6f%a*u5Dz)hdfkBDaRUXE&*V4#VQU`J(1cqA*L_c&=<&*sgp|q} zM+8pwsrTO0$(zrJgicnU9+`u_xJ$TL;YMRM?++@PKY-}wJ|w&KhPaX(*nD%T|8g0w z44#RJlQHRmIl~FQvE75Qgn^29fs}h=G(YYG1&eC{gBEEJ6ui20!?(VC{Fb5hpvWVc z!AnnQG0~_9B@xMgglp}VvPM@Y8&jAMHmAQS&`kM2&PlCQ1$uifZ!{^DT}d|^!2@R{ z_fb|~)`(lYJnHAJls>r~(XTEg%G(#O{orE)>*-VZ6ZSRV3@V$?H?x=d-A{a-j~pD4 z*_tt5jYSl?9|`(mMf9%jbOzq9$uFuKGxkQqHS03~68QM|Nvg3Gjyleu=?FkyO{nTn zw%(f?&~yXVu7(!L-ns9fGojG2(d({;zO{smuq&Xgzmr5@2lHW%5)ltI4a85!CCDJq zunUGf%6#~d0GGZ-D(7K9=TM+zcv zXzo=joZ=xk_5$N#%D{c-NFph@&9X;r9~CDc)XUuUQ>fR<>-yk{?NYuX-J$#Cnd)<| zRnK>m#>{7No473wd=Z5v8lS`jQuyvcl&We}vDY?6NUk4245$_lW=6}Qbas@+w>3)V*>N)oaP%R?Z+m zf3OO;aiO-Q$rVM#(%qY>%$V&w`BTpm=pIATn9lkK4>g3ABHWT57L^PNH9; z&-!u;oSK8Mx*tqbX#CLv&T5}0{=jt)jTz#9XymHilJ?OY=|CB)4Ut!n`*(NHJCtY& z?FOi^E`0MxZ~;VNXh zq6R~gepmYphPGnls)tjiNf2q0ZI(b5!Ty#G)@#FUq10=)BU`9GgmVDLKA2k%oW@UE zWy#F6M)(4f zHrW&P)NqDQ`7+O`UzLxdTB zD~{zNm!EsH-p<-wOxLp8cJl1?bPP6kUj_A9SV_du$J?!d-;FRG2J62kT&vb2%ASxf z-7N9>!Xj&tsqV&4H9CC*S@NA@Tz`{ z=KT%qLCr&U!jC2E^hQFMeiDZ%-y@^kF~%ZkM}a6fFPZXM&4G7$5A{H)DQsQe))x6C z?R_FBmfSYe(c|8|I0AGRVvfTyDM=F9*_k_ZoUXWrK2IJ?3+CFK{}RaUGMFpX{1|vW ziPus??(OS5^697g=iEzey-d3AsVB%#4MR$oTp=^K{yugeS=d6fpAg!O7*pX45!&G6 z)1^pA(=W@iPeJd!-Ad?nxOdRy)nUO`w%$?ECr~}AT%_-(1gHe{)SG1-uZ`zq!x`)> z;$M|#`+mhvB;EHplj_sv4T`?ttlOgTA1ocaO02TQR-Y#gh3jwhu+%NF4b__?UXxe1 zTHr3h>r@JzdPJ2hEvP*)KS&K_mww31kpKL-)Dv_(Pw&u9iuFD(c?S9Qc4vUG=%PH~ zgzIYSWlog6LX&GdJ35dqsbnudn}b4QY<2TXstxe=zQQaflCT=XsgUsI^d zH=_GFDI!WFEh;c3P9DL3O0fQ??|ooID!D!pE9$_w34-8=$ky2Wdb8Ha^ap3$C*B~W zpoA`lea8#}ZaPN0a@b1|ieY39Fz~5<^qTHCSRCWKjt%u!A}tM((4j9bSTS0OPdUxF z(S`u-mok_G@E@l zVzz{{t74I0%fX~0S~UJG>mTg`f5WPhpOeW~-z~9|(PJKg74WYbS~nh?Y*Hz*yE^ee z3~=itv&CHBlC1*#OIr3E(YI3VuKxto-{vTa@r3CGUhSFE^Pv!+6uGkNII8D_*}#5| zEmV@y$wfd_wkpm6YK#*St^XITQ#m+UyvAED%S_i^emz`;(V2Ir(uT#RpoUV8-a6}w zP@CnT7a3yYxs%jBGy_ai#<;{I>-YVd#T_$F-|?6>UCuxf3J!@WF|ag8cjo 
z?)ns=7H_tS{5nSoMTMmd%blS$*VcIouaTdQq3DP#JZrrPLPnErDl_R4*W4r`&l8q@U*SK$uc=8)G#%+ zk;m_T!z9KEH?%WY<0CN&5{K9Gb;c(5!>u+kwxl*b6_!4Xzx}|`FZDwGrSFbvT?8lU zJFo1aWkG&m-!HMr38q8iuEZU?g+nx&JaKhpbpy58(BNT`I>=~?5vMse;9pA&IirG= zl7b#97$%w#7GEuj*&zHZ4!a@>7nfxS6^L5##w@<2tb$rAUT-U9hx+WYxgGd4NkxNJ zOhe%BBLaSK-qm0Fu??#SW!KGIn!q!yeV8RLToS?C3-<)!`Hu_qgrUnt8Yc&$Oy&4{R zkFR>2_K$&(F0yG{x*N{=CAvb}HsXE+Mr-6H71PDs0!S{u-@@}BE{1Y(J@zrx{(n4^ zt=RGV;Twh1tx>ZlN}SMpdbPd^1^Sh;r8>rO1J4RMm#`!6pc%sb28k89i)eD1oUN=Z zt3PM)K#T1Z*wY&0TUX)bpAl;v3uR~1z={-YK0 zNe3=$h}bnz{lf}sHVx?${swD{Dj~q_-hX^;k%vtqpS?ks?$7@?dqEwb71l{UGW*X< zJNlTvB>CBa#aDRD?@vGeehZzopvfEN%1^+~{R*BxXbG}??}{GPYn0`)b?dtGr)9%g zBqvXQ0?G&PGd&-lyV4hZAky}HIo<+$_UwRHlk34AWr@qkqFxQmbysKQlt$#AM%Hjg ziO%a+iX9Y0>gX$^1dWWd2UA77VL$!gQB3)J?|8!ogH)77I@+YfW~)Z;`^R-H@9p zz}9cVJQRb2ZSOhx%nxd|GGEH2KRAEA0=%>x2wW5cyhw#u0al|DC)Gi8nSctjq>IM3 zvc$v58G*9ufIt0*(_eICcSRljj$Vl0fEH-t3dp$5e10SB)|E5t+R-iiWUQFK);b){ zXK3Eur!#WVR#n)}8t6^c`+@GK4hHU-CW#`9s;uwjS{RV`-w(e$7MNZ2p8$3=j^11T zfGliVU_VO;28q<6(kp)UBqK$?rz_%Q*6I~Nnvf?+JQJ|`D|byG|M^0;FhD*60($w&J{uX`xhFKWGfQ~DY= z5^ocWWTeh~{jEscfr*Y4L0ghBz8yf>{H%E}!g zTBs)@l7Rb*AM^>CD7hGx${+TrBs2W!)N)8tOOUu#nLz(&9D9Yfx7+r+z89y^0#M|@ z`pY{jMHQ2-z>f?em2?X~?_JK$%+!wu2YhG!<7ZMWNy19W&eqhu;uyzf7@^+6Ya=SY3jY-s>DBJ3CObQ29i`5M(|s zQ%g4I^vAOv=br)>8emqeW_D)u?d(=Nz z5pMI=C|%PjIT>uOaM}i*LvU+YT?pPD^vS*azq2eunuw?r?0~#AV(;;kG#F~GsKt-f z7|gF?QO|r#atU8OBy5yS`%Cus8Cc3x)t4OVR6S+h{qf-8*K*1mAJFes_O&OZd3r=8 z8%xEk*1Nb(bsH+u&Ea#*Pij~F4J;nUo`H~YT`4;ErSEW1%yyabyOZa}-0;i;b(|D)W&zxyqjHHQ2Y0G{NCNy|XZ2%K5vBgmP$E28?T_Fn@?OrlbMT%-c(m6vK5v(KVhZzL6@ag|z~ zn$ErV<?iex?)UiV=ezjV(%d|LaP7A#%$9coLmDs>f>-FK$y|Z!EC+QCH^{PK?o-8=h_( zcw=rpNt13q|J*-Zx7URC%X7|71M$tiGU=E94R{v}9xv2XICciiO*L^+m>rz{VI=5c z!3#4!m3aExtCRP`Bk9^4SlQ2P*KZqoY}>~<%9=L@OL)n_V$S}UI% zu`SOH>IzQdZ(NtBjH>@{^2?gw+y#ebDt8fB(vB)+q-Ix*`-Lh8l#ta(g-ZGN3ormAeso+d}sBnh~a>Gzyz;NjQR zmW}3tlcFED6McoE4hL5k*pUOIHb63a=|5Aqm;Iry!q%*{wB=x9+DFu1k}RXKx@Bb2 zI0JgSfRx)KK9q-9xsuyte*7q{y*(~d0f)j_jg~*oOH@IH8Z4T z#xSOr?;KOKyZ4j)WU1i7O!R 
zd}{vq4fQ<#KdmxdoR>E+b_QpjNtJqRhM6dBylmhaBPDOL<=zznbt=smz1iGQnFs zCAVR}@C`H9ssnvQW==pKOBx?|42b@yHQg&8vNpS^Z$=XEV%>LP4Osry%fnSlh>yPy z-+&*U1N<<_H9_tNxBk;k8z5tUF?hk{--7=3vjh^5!p?ZhEB;$;|M2Gc3y^_7M8Vu- z7XHnn%@_iwSd5B}j5@yAx7EW98&GFo8TkJC1HqS#K-`&g&i{XGQC6WNu`2#x7SJ=z7iCs4|;4qf=`iAiwOo+eSN$svkS4BCGt-GQ_Oe-< z>IPxr7m{*h>;|Y|ZM{SY^EVHtTO3|_XpNYwO++l;!X`Z&vbzsbHs7not3z`adon1k zBas}+4zKh1lYsI+%*|l@S~KEf;AP$f;`tllWhubrVN@pvNOCDU`WJhwKyx0AHveU# z@5wA*rvwGR4@qXcJZICxH!jhN)J(oB7sTv6#GBV1pOgxpUE+VhJ4Zrdea^keQ^1DU zO|Q0t#g2RUMaC{(h{V5$Tc_w=md1g1SI7HTsFU$gbge7y)}QD|gQ`&>Aqz1#t^&%5 zNd9iOO!+2E8;|UPt|sD*H}#svbnq;(A)eAqEG|5tRO3TfSUF+?@mxN<&ashCqymo$ z+Ewz+u1Z0yHm|hIKEO?_f{%Xa0>*;f;_m4uv6GnnnUnNyarRCzsS4i+eZe-hNrOAmqsp_CE zs4(h7)8y!rWe&$jFgyvPi)Uv7d@|l{Nhv{QyM*U~}ACj^Wfp-AXz)?25(63fNzDy51Aq6 z%aA_&J~5G@?LhvU@(M|w?m@RbeyeN%9$LElcE5N}L}%uF0X7EHpfUuPDPjkuv-Y-x~PPYGv4;|-7`tY=lzv0Qgk37H}0Sp-ReLnax;5<-ru zwW6hb+Yj$gHOj+W+MaB?g`m#AWi4jRZCBtKU18l;@i4YLq(2w;+`tYR%mj^p5)*OM zT#ubAmX?*rx~1<2!mB5@6tJGe5xe4vrS*7L2Lo~tSIW!X=FsW2Jyf;gE=c=l-1_2V zT!|Sj=~hHe_mu|yyv0Bx-P0uOIir(rYtjM`Jx}(9yn|`^vBBU*`#sLlB|$Q>nX;CH zO?E(T`}>>Ye>a=mgQjo(n+nGI&tuRGZF06vet00RXDc7(+O8H0GEAQZR^0bT4HSPqjRvH$-6FCT$A!N zW3_@CL?u1_2%z9rJPApH)!f^zmq9jP^ejo5xo17MTgu}eDDb^ZJw4_n97-H-B(K5P z4|$fNnJ)>j`FOdS(gab7tezV;hcs%pXI8e0pe1Ks#*3-hJQSaUz9 zqnO8D6GFvb?tjwd=n3ys#_4X{401PZA`*^E77O$jU_O;Q9PZ|9v8-y!LkGUw=uciM zh3iL}jp%d!9E@)eB|`5*W7(WpWh%Tv7(M+-Q#)sCaXcS!IB;>wt|BCVeuJs~)!miZ z&F;?u4_5i~kFh1|h#>b7C;Zr~hgz+Ks}%?8!i7kCU(2Z&ulBQ15j4IP9`$pqGCU(vCNZnOfvZ))NFGv zf6^k9nKwS89RI?+s4mo(n3|)Z8wP%<2_fO zEDVSqzqwfDs=4rI{pXuISQFA%MC=mWKDkfP5Uv>;WpA6;tiv3+Dqbv8J5TL24MN4P zNL!Ayto-f7;zqCtag&oP)cX+$17N&rU>@oki%TWrRtME2JJR0kT!{ zxJ&t(SKP;kS*xs9a^(u~Li}pH5o^*5T5qm;F-&1(yEursYF`#}<%*zY{pc6rZ>(ys z>mq~sBB{`i8_l<2haTnC>rYsW4wRNR_&VCgb*QsLdn=mv4OB8~6Fw9eOIu>?6vraW zgm?K8rqM7Gdii zpwaO(_HOmu6!d@wK78&-$eBG==%G)3g@&0(kUC^2pz#8|vjBg2#tA)Ya?K}3P+M#) z*_WZ@R6SpfHPw?W^(#;k8k2Eid`Op}IqSbBJl>X9_4{jrpTTG%H%`wg7iWCO|B+To 
zvOY8n)wG|Jd&2k4yQuzsjjhIAW`@gyaXy0FQeL14e8%*_oOBU4roT`Ij?Tc{gl%=lQ*Uul!g+LB3b=z)$8jFDe& z`@6P?y$XYK3nCdOIt3Cey)0|;-Mt;-$MeW9U%m6^5~1Gwl+{rVb0-`+y|~(OHV>zb zCLbNBjTo&72s5%|JTAcLSs-Tu;Gp8TwF8CopEfFMs5;fyZ1$e7mU=9uHn>%#L7d>> zs1`Rx_Jl3wWmBHfg~+b=?K_sSv+56T)~^kR=dIcLn~Am4{$9%P+=J1S@^=~e*(T@J zz_Qm~E3u{`$@tg~F;w7I=7smi)$rwwcAv_JZ$v4jSLm^D>ir>14qx7DI7-wmqWJPG z4$swp?wD@~9hCg0kIp%C;~Mvgo!$dTZ{F-0)~Cdm!P&yB)tJeoavw6i-Jup}w`eKE zp#yhJbgjkn%it{^TO(Bu_McjgtF4)dXBF3ax`Kyr_kBwT9vcccy-KeD1!N{tAEetQ ze!@_7MU=69K~&dD_bj`o-9`^ju`z)%SdJB9Q!QX@B}-1br3X8@8U4P~z_lAIFBs-d zI^oN{Qk4T*$-J4_<{Kk(R81#TJ7MAP#2|iC-wgM49vmbUwJ5R7ip5@QvvfLD7@lRc zW-@MoY|Cniwuv1Z9b(l6X4fpkb}Iy*I=j*{363j^ZC@5S7^J9WfTI>CJQg~7V&)Fu z(vWI)3d?43h;_!3&t@v}E7$69XK48|*wSK5s>zB0J3T6(h7>c=1;Q<)R}hZf`K#!j z4dQV?R{Har(utZalx;&#GmOb5zgz8wb4vo#yC!SYjgZKrenn%`3`P?(v10wg%oA!*3;l{HPV~pL_6_Gp%<%yW zn||sPyp9YQI8tumg=!C>9Niq(O(@i02Wcy?Gon4-tR%E-`qCa>r!)ghJEY@eq+Q*(ql5 z7P-kNwaZWRAy2vr^jO8io-Q4ezb-feT{;)U}1tdhXAEG9?XE z3zfe5j`1lX!*n|q)t^Q(i}(^bFguvp7;Y%cx44eFBvDyI1toA0M-d`UPX-pE9ZQjMgEawR#f~CL{ec z0h}XR?unnd!1#Ef@^G)I9*kWb;+D~i7!@q{ILx$|9?;+g3RB1_>e!~{SjJnrH<$&l z@o3@(O{U`tQx@%dYeIYD;D8f8oC zr~?lYQT}^sk8N?Akz}4KT80(e@t>t-lKkhSGrg=7xadZ9=Iz^WwH@@}H?T(0C)r1g z1L7jwj3(T7W6BE-8QA^ecp?kQZuZ9n4vd5+-cr^B$8*y?U_%R<*ypbrk&2^w83QU7 zz)OV{{ku?UsVVg%5RaFjrhsrc>5xLM?q&(OP*K>sV`2ON6Af#2f>leSIEO>JAJ11? 
z28=I{Rx&TlZOqJF_|uC@ZCBtu&dJ;Aq?K~Fy&$nI5kfxtHD@zuF0!7aB6lc!rg0}5 znWlzz<_{nx9@A~SsOXkP#u7{mH^`0F{&z+JykotA?>I|EV~~$YSifiPGw(C&E_$pD zXV>#VlNpWC5rqNQ4#bXhLM(JrETpVfcef+vKcc{+p$d}K1&9Is?6bF$_M3m-trT280%o>to|yn?%(@K-1@i0tyL$3qCf8l zO~2?h`a7`uFIujC$$rI&37wp3t}Bfh_BFQfB;@ftmm3pVc6klN#n6Gt3f#`BLiWpM zBgw;JaUOH&s(Vn)3%8RH(oDfhuv$k+%=hhjuuDQn6xD)PlY zh^Q^=B;6Seo6lg4>Q^Ke&$Sr4G@G0_X-zjB*d-@6ax{w|&$3z%L>kcFd@V!ZTdtp8 z<92HA2Rj@LXUB3I&dyis&)t`<&0DYodMWbAJ(otA7%DLxB}gD!dRb@<-4CR6DM5xz ztbF3V&=pP@T^V4~;S}i-STl)+m0s)Dm!x-m&ZI=qGIl6uZ8v~)o;UUH>o(z!kA4 zJEJnw-_(XxjKY98J#-kNug3bQfxfR%qv}(lj|5T%?Ro(%qgCuGsf{%WC+`807=P?| zZes?nIOT-0L**dai=Y=Tc!qNZaRQMjBR`Lpi}rt1QEjwafoEPBRwP6DjI@2jyq49% z3zyTO2g>P{a4;Rqe_k)8LhpG<7fK&$8D7fnPb{nkEH0y~>AacYG;3}&;n&rq z9nem=EFmiAT4Dya&37?Y{T-0Z$LW>urQ%h=ERNdZL}`0_R4tu+R9Zv_wRp6+xl2VS ztXqdv59*JQWBLa#Tqa@%_(6F&)YgS0S4v)uYDAh=BB&n0tz%InW6sLo)m%119@Jfg zv5E@WWxV*}IfGukkEy^LYd)0O$aE?c^F@t*MwnU4vpj6BBu?*wZ|$m6deUqktk^!)->yf3OgIZ;Xq@TYJ`~#UWKx ziC)hoR0eiTrSMB>T>r(5)`A9`N9QPPu;uYoMllKFYl{7w1Ahv4aW9@{#DC!^u!KXJ zKvhCF-%KY!rOqH@8-tt6j9Hm4ybi%SC@P^zp%kqvgThK9`m(gHZG*nUNfqOv?@C0vR-nna;E;c59 ziLxh$g8|mbmS7Iu<<#@1&QpZXNH0nOa z@_%~hG98gxK`JW9nMgl*m@sn5za&ui8YfWwh|ig(c@4NOK|-s{jHZu-@RSX4iTL)- zdeq1)^Ki0>_$aswf)Bo&5D$7;7-O@qkB(PG!lRk@9=dym*S;7)B6E$7SFDcdA0V9L zTsy())}1d`a!B1tBETcs-CxCtZtg(va*T)*yN8cdiGA7Plk3f(t}Qx4Q&Z|CoC#mG z-0j6pWuzrH5`<7;c9}UlPPV*${sljx)^<+=Z@2Kk8TDdoTK;Qi>LBlDuyIA7 zke82*IxTW46z`nM`2w6&K1d}lFRBL*ra%sTg=0Id7X#M#D>0jT_liSR%kNI&E1up` z*qnT?;7;p{-{1QD_1_9I>Lb7Zp_bmI<_Is_&NuZV+UP+XiQ~W#r1>ALXE1gllhW%b z^$%|$>TC)~$3v03MY9cRHKGiH-->UqA`*Yc{)1d&fzYyjnyA2CBixwo`E7?vnMbqfxvK_)l*p|$K`_QKzoDKo z(e(SXNqBXr!d2fVp1T9eOOjFoh`tPzfa}0@8|ke~=gp-=Zt9CU{qgAkpm+d&kbfPh zI=^wwlv~-C@LT~UZlNK;La#;LA7P8*TerUKU&qjw`TMuptS}XT1$cdhknbws0F9Rk zz|h8b-X~>>xL=8PM*=WIT(M!?CYeC^71FTU3=AE&T2_=fRD4Bb5?X-1L5^AYH0(z% zPyq5>`kxxtAF-1~5CDTPOJMMSOZ=Z%9oGXxKY(OkiRbeGxLj!OCEWfa7rTKW)PK1u zKME<%+A4n_a;7KaM=tuT00d|K)k+y;=^GbQW`GV_oHAPeH^}@(U}*PVjQo#6Vs6{| 
zaw&(j_^S*T-J27N{S*=xr;bg{7Ul28J(m(J{se z@%r6rl4Ss(Tc*ab{v&~Wtdi{>^IGis)fl<(?Yrnl>g~~U z&2+`Sn692jGWy83l5@I~s^jRdl4ix+_6hOn-5YGORrwd^sr9nI)0V?18%Urt82-rI zK1XzdMIx>uavM=724Fp{#dR{0*LnWei!$0|XIy@v?Y7yp2B{*~$R{irt$8tOU4lFC z27rQ$%&i>cy^mj^UQ@)!E%$ZBZ6F;0a7b^9km9-Fo+^Op-9*$*Sv-SCutF6szi*FY zfmeqH-!E(mEdR^@QDo}jxcqSU8(Uun&7yCKWOMv(xkTL4t=1_+72VAIFB^7y?G>kh zOi{4~YIqu3qw1;~=Z+zK761+Zz3gsl`62_4o%9bm>H+jhUIBax|5KlLenW|_w{i4& zuCb5pEWBkiQ^#@WPQ9jV(XDO!rCQ?gis*Qk51uVAF3G#P%Wc*<x|Hn0LKoRrnCtp4v%Q-W04AwJW&sQCKDb->5wto@maQGLEARrbL5s)INh%`ltbP$zJXwpMeIuTH+5CQ@!7J8MA^xlgElAuWM z2_$ro4go?Nzi*8FBN_Kda<6->HP^gmx#rAAeLZy+CQhakCr+?v zYCJSNapDw$`dT?nPyJ-dX-w_JiIXl)YHIqLYHED??r!!@FYHd7xE1Xc-Ka%taQ;0~ zM0E?LaYp)bW%OwSo1rTgK3})iKc!~;!NLV&2fzLbX!=tQFhjdf;~jjNKS$5z#_8Y@ z#)RPKg0K3De%?xd*5^;o_;uJ~KEMxiVLvpum0%?(pTe|Yf_=~na6H90pdIPDHr}aY zf6{!q_7!|`)@UyeG+B$l?jl}SLGMC|oK_v&#G`dLGf}QZ#I-$hl+=rMy6N{X7YAzD zGMIhjB;sB^sod)%e%i@6T-y_IKxZDjYdPS3aWwP%SM&b%lZ*8!x$!p&WjkH%N8>AQ zJ(cKSm4ES-`B9R6>ZumPSk~+qwVw|;1jRrGsGKXY&@c`IR(I*2aqXzB$fXZUNte}A z1g>sXjr_cEy?&h4`O|rixI0()_^)2fzap?oISe4eOak6?YL%Ka7J+xz?tfOp*#VgW zD(dpa1j%JMFpJGbW!+_8HJO{4DreLFKvdtKcVx2e~s zPn-;OI&q46b&~qxq`pp^pvk29pHEI9GHL&Hz4H4@_Ro7SPn=LWq4`kN`1Q%PNru#+ zhveq1Q0rV%Aw#-H`pso#`sXY8PX`z5v$I`NJugxY&y;Mu^?|POmLZ*xw8#g;Q9h zkw=W-qHb=sxz^RrTVAksQ;-$DlyFG_qDRPYQR{r6%Qg* z!h0fLul&;`A^s4MpI$QXbx>?O{eP+*br==TAS;T6kY8<2X{Gn19EYOci=kMn@vrc> zTLucwaO2W}#(wzr_fXyg@4zurqg<2HWQ3h9|DP&+`YrSI)(_qWlKsX+gK&iov$X5PrOMO?e**>mpcm)-MGx0~%waFg9m{D5 zhy#MW{0ZYStI;={qbZ8cLsxu@rd6i(tgw|KyO34JX^Gw;p;R_u<0vm2mW+`--k3sMH4xKu`$==HKQ83^#wo@rZQnrlLY3MtRfEqMz|D225#}o!WP;8jp?)LBQ z`2QII)0|jVO!*bcOD2l&$s|15q&=%kTDvTo++7gOI|g_;Gzj9{gk_JrY=4Y5t#}%g zgIDJM%TO_ze3togeQ7evFP%Z>Q9b6>rCv1{ja;qx;G!JQW2gMI+a&&Lf02`|4xgpE zzwVbicY*MXJFg635p#Th+eQI(zC@|X&FQso6M@GE>YP$aKxwy8R^o~m(@ai(TRSwB z{6-N%=vV^z-}uZf9$gE@c6G%UKiug`5EWSHPE2$jj)-O!cf2hH#=PXcTzAx2Fj(a} z`HOA^NjYX1q}xA=(F3jxt;pry^4XX`9prJ{2s}R2n#GzB`NYTxIxM z@0i#m%r8vCv3Y>xi3X2+;|h{~gZm9|&BUO)UTpQkhz(f%MLY1ykW49holvGXTxwb0 
z<%nBJ`FvS4flb(CERQ-lR$wYpFU@A5#t0Ct;JbZ)&t`I@;#qdjm#yrQ%8lXGVX*CR z(UXK>6P$H??CmE-9fRtoaYY!i?^4>uYNL{+Jf1()Cnry(jt*hpv#)6Zg6d|D(GsTx z0lU!K6^AM1%8{IL+wx{1H(RFtET;mNprf_mL{NNG)7&a1zLC5ev)C(src*}Vv<_zB zNwV{cA#(P)jagkb54w-s?HhN~K=1VL2)l3Zj!nJZ5%nd)&pn4pvpkr5+2ZAXV3(*- zX4R1DeJIVp=h9o0>%KXo%5i6tgX3`fJhCON>0bYR-E?2SOtzAuwfRS5`#b%)5^J?G z^JzZQk;c73!iG5?YQ-!}R~!`|Roi}K)l=g7LmgkvGunYrnp!?9hk`F273haT)#P9=YlQC(E&lFdJ%PB z%Mq7%YLE8OIt}ZD4b)zp{Bxx?{48-}5=?%`?6*>h4%|1;Q{K6HuvWbcSQ=6plU)K{ z@fgq^$3C@39*4}=pi?>y50H+R8UL{65ma4g^yj|+Qsr@ggk03&CtkbZi5g%4xABA(k>a@kdH*i=t4Xm9b z-&VDz(X3pZdjC|F6MC`PhNLtKN)P;%+q~^a$9V#bt_u+X)`?g=>ALXmR^#`)_(GX% zwP(PjU5*POlI&7$_8#?)g9=eVT2AIbvSP&qFcW>7*MHf-#{^dARf9S@rw~_Wa}Srf zqTCau&I9Fd>BU{@PKT2^zN&@Jx`GGL{cX@N`aIIPEAxJ)lLUIn3Grw*cB*W;sHeI__6(_S+v?{5m2 zSIzgoKHFIX3#7nxVcu|oJVJMz73z8r@!tz{~a zrvmUtf_@iv`idA1!yB;rTwe`vZcVV-;h3-3^jJu_ci&_TgqAKSN-M`XCP~`wwi90Y zt-*=A(xcDtru7Y7h5254Z0jGRpM}4c@Fdyie9@!1N$$!XR6`A1anJ5D#PI|f}R8OyVb=w`Flx_D@ zTITzBCg;C0Py{`e4sQvjjWid|O@8p<%p<-g{3&!Px+;auT;?8it*Mo8QE|kr3RuKq zlDu2M(+2uhrQ_GOPaDM2DWh*I-NV_Ii}b%Xalb4!E{r)-Me|I?=NQ>M=XuT72PMd# zoG3pHcj=nMZ*0G=-y3@zapo+a^8k2AP+-l#gSvASDcC{JNQJ{Uidwf(2TGK~ZjDxi z9cN~$oL%t5>$yF5-NxcvDA{}a7X!Y_1?aX{>L!sWYUwJy?vv)sA*Ao?m4b;82O*+t zs4j#EZ7F0iahJz!>S|NMm+8Cnf0zlCzVjX1&DiC;Hna6kEFD@m zA7X>>=GboX;{ev_5ZYaY;?}-EmcZ_W5FlN_-LzbO;Y-nziP3B0fyWJ!wjeUQ5S;S; zv@F$WFpHmyEEYy1lAV#ByHi=yftBjRD`ahPC3cgHGk=(^jsqHV&iyf}iC9Ko2#L}2 z=t+~W(UwbILTOvjb3YGX=4AW!`XG!$nVu(~j=OyQyjhuQs&uL4s45rB#A^_jbPk^J zLy%xt^-Mc&;HTGbE$DA>pCkmzOr19t0a*x)&6>|6T*jphOGXS3>njZwbuWHd<|TnK zC5%GoW^83G_}zSik5Vm7+L8=h!`gqvE=h68VH6IjH?k@=(H%^Tm_ldT)$k>I@cjw6 zQ37<^_6tv-Tdk%S0N7c6O8$N4nTNDI0h-uvsMc5jZa<~mYXB%c z8CM!e;U>u4eJYo=;yQ4vY7ZBpZug{+RpF?wCZz`-U8ai_`kZh(PMp26>wS0%e?lX3 zcf2reie+5PJC5R5QSpgl=ZRQp%jBQ-XNg(4$WNYtW~BOeCAoLzns$N9kwOr{+Eyvx z<{viqDhG|Z^sF|mg^@CaKWkMp{D->BBZV2{1E|(~%ehJ3y5$2uuxME()!mzK1`NFF zjL8@T3tLLNS2$}+i=75h{RUs~KGC>&)MR9j83urt!pWg+^0e`MJPHaFpe%?tG=JkE}#Yynqv{w#h|5f&U3vv2#+VPjE 
z+Qtp?bV$;u?%3o1^j?FmvvtH2MsP{Lh;u_6i5W4rg=g>q6pjnNsXtgj9|fbb9)16u zpJH>oZxg*=n){?cUsZN*$VjL5g>R&%3-X8n8B!7j!bF~@RoQht6zMN|0PS*!z3DJs zuFmT_5WqA~wNc~Q-xmAQ1VR2B2SJQu{j$pa@za46wN*@M?32kwg+B~d)F~Qs)_u|o z)VvPTIBlcgd+j0y&7@moA@%wsc{+dswC_KVoh1R_8?bR};jWscfQ-9GT!n_YRr12x z_1VC2s#~BDa{3(IK^(2M=H`k%%5BhJ3v8wtxO$33MrK>`GjM zI7oFy_;jfMe*%Ylfl9(DMB78(WSj2KmOp=z@yO75sw+T)i{O%(V*KEk%Pe+ZxuWdy z8J@#WO^#q)-Av=k=kKVe_>f!Pdw(%s=H6H>T=vk#myi^M=oPm_D9nb? zk6BNS5xq0j|BI1NO&kTBcwtjNQBsp{t(Sa32I3-D8!J~nQU?>raiPV^Ca=Ii3-@;hB z5lwsuUf;cZphh!^4qf(rsV)pJKe5So+1f2Dj@7QjiWNVRf9&UpzwDm=+u^UpPMwcg znxsZN+HiC8I^@e=ZW1pC?je@QRF~6zG*>j&xX|#~LVSdhoJfD#1*f;EjXdLjo&%3l zs4T@KPu+_*11?uCXj`;Nk9AABkJ=@kqDR#9C4zLTLj>}UYq`r9&-go~`Ak7Uo~5_A zRiET)u?^02Q9UE6P%;AKp=&vP$SLK#zX)Dj6t$e6H|noZ&vANFqL(Izs_pI#+OrY* z;O#<7uOxt^$|9ADgP1V~I=cob6!_ZoWc?eV4Wz$9t+PYZOZ< z-8?>dEOfC+91RL-EVVKBD{_x46=6P9O5|#{4^_d|rr`2@p@=sy&(K$i-Z;S0a)8JJ7Ug z_ZMk@_&V55Wa%38-_%EWag#VFwRMBFzb)Lb+ z0{8(Q3X~=f)H^SO9s3KBT#9}=W(QmC!|{biH#>Znq#1=jCw$HRHsx>g#j_HV@F~Ib z?h%>d<5uJBeNc}4DGisfu#t0dpMCqvT@}5P{bWy+m$2IHUM_X}Hfr_|!C?Q~o*K_p zhk#aqDY&>;E=wXLtM>9pVbX|p@}E#`d%2^g8Jnk@Vz*Ld7laL{lyWa}9Lx$@h-TV# zddIoOs09L$P0sQtTrsh#veop(PuANWeltKRJq_|Bs`^DRY#;%MW4|K!EqJ2{ia_-T zqs5got|va`81$Z*3DW5O`NSma;VWNqDRu$9m7S|b_BMzd2BQU^9@jlhuW7oEU^w&8 zWy0vI+1B&))EeNrk3MP!E7@(W9_XZUHz5b@Af+MeH>95RUGL9)?$rjGhT~dPwBG@& z4U;SdwuA4*wbbG;mICS9X662dWme{ep1XsFQXy!D;l1V{+A8mLIrCb-+O?@#OD1+9 zk@(x(F7+A8fyX-8)71`1snh;9ut%9P ze`p0YQN#FqSU!zp)&Dfz)R8pAPSSuny_A<({&0w>IlSQC!=8?@|HFBqjufL!osawE zIlquUJdV?RQGC>48G~~;{&$)GE6x9V4sB2qqqAU3?;+Jq<8Te3|F2`O!FCck4QX&+ z(2Czc1O9cOgD$b%MW*?3bngONCZ0zoaNBpb&3!xWZ}wiR@9LGsnk; z;q$xrOll~(JqyDd=1BM`*8SHnIv8mdiRr#ru)EvRTkt&RKN=SUXy#gW4`kR&xBK?CwPzJuLU4wfjzPnWMU;3+Z_R<|Bv8U1c`No8T$4!PQXR&`d9~!q$x~X&@ zaSvH8P6CRUKUUsLV%>KS{D=+s0=>8ypHJm=ql>_~BOYu4VO#9NuDtldfU z1xP87KL3d=ImvH)zkEYj4sr;dwx_WUIC;k?!5!;N_mx$8cyKaq6qA`aT6cXRBd{dX z^he(p=ZRhRKM|7(dn|7jI&F@YkZ;228{D-vg`Wu^QfCsp@SfvjIa|8M<0I^Y>wn<; 
zZyZj0BPI)e{Kjds`$!^9w>BEYX#NPxy=!>dZ9v<7;pVcP(zPamKl9_7w=(;$9hyqM zzbKd4Ir+rxOb0L_Y@Fk)zpwa+j?2(t3mC$kKJ_Jx9gy*l@c+3&ojPl7MDPR>Huv=` zxZOlaqcW?Sq}?(mUXPX?83UAuc zSO%~=bX^L1`{%*qi@JA8r=B>zj?zhFMf&T_LWodt?0zEhvC{ zuaOU_fVAmU`%^&YH3EW+Bldn?+W)jiQ=9(r#7&AzcIcnmbb@B^lq6;@oW+-@%cXjd zr%*g?!{mS9)x|aAju#w%MB2+x_?uKZXg01y$`i8HeTf3~af-PHw4e>k2Fj~d1P%yFLx5&$;;e67=?t~fE? zxe;5|oJgdZByX(SB43k2|00U>=FQx=S8h0isiO{2JE+soX45JkC0xUOfxm64WRUGg zp@ndafm=iU9L#NdVti`uZ|C8>hT=(|ruLt2CQQHjQUTxH!ETJODuuIcY3r@p!@Ce< zwGwWLEp*36cg5c(rQh3VPIMFgD`&|UrAgCBT#E(;9H~jW9SF~#!O6T}g_fE(s;*|} zQ|Z>*mkfDc71?O?-jd(=kO7hJdUolr2dMCkeB0bq-|9wR3i6J~^1?<4{m;cd%jLeR zdfMM{n~r{3=3zy3Y|vjeDXNA>^=^8mKk?m9!*-yuBV>5BcAJ_zHR1|cbwua|4#74q zUDW%UV|k_qcUB? z?jsDgKJL>t5m)aDqXGFz!|!;!T2x%L*1zzNy8r8nZ{d{AnGSCDuC>o2^?`vs`I{JB zpBZ5k2g`#TxwyYVaJ@=V`)gL7eJ!u3`QvYc@_){p4@_q$i-{d*#!_jIDwFi;zl~!H zQz>fme}2~g(dUgv!u`AQ&5Zv5-#=Fpx_lOEf)>7cULp@#d<8TR)AKGc9y?b2y}lElJC2e_|I;FO!x%{jX*IkLL0-zu5Op5J}lBJgj|* zSffCS*TxaON|`E^vxb=ug@nw@5yr@UCZWLREgdX3GL3Cif4d$k+DX$7#fqybet@~y z1%>YhIOhi66@8f<(o#m?&!$h8Zk(R=>SZKz?lWnzaHZdp@rLQ?DO9TJvu*$5g8cWA z_#OuZ64v8eZ60^=;^EgCWn5lH+%~JKxpWKIHZ@&uSG_g~Yo&5|$Bu>Qudzc;kkZ)1 z=y0HXy=JlMkC~s|&+_$q{yc%%wE>girIf2L+iwMh2Hd>0gtedv3!?Cr%e&3_7 z<8j3KFA@hv2OcYi2fvv@2%&#_*c3g#l(95gjt0m-fqAZo3>TYrU%+-f`p0kiudAqG zn$PVKaR(t8(MR%in#b9nv`aDTx!bbr8m&hCo9kW49Mj#&ZpgSNMmM`s9WvtR%8?>0 zkSXC|ORGgoWsDB;N+ig-WZljS?is!>!JNGEZuPV{6svG=3^(&V#BjHO)f_6CD$abW z=R>&@2*4;i-j^nC_v3A+1eK$*rP7Grqg38ixHmcXk-6I0r%NB$_FvCC{;ryU9<7%* zSQMF*y8pCz`Ro4!wQn+-*OurzvM#udKB^rDiVJ!pfZJ@MtEN&S_~t%t3Ad4_=7)pw z-sbQW*F^d>H|An?a9baDp!QUgui9h(mD6I}boP(kS5DuF)kz=4`XCj2d~4Rj!uGXr z<~7y(aSV_4gSVl{e8QopFVzN|7_SA3;^E}HRIkNf%R3jLY?*b{mpHc^hBx3kKm zGEm#|v5{K!uE6y+DsyTv^?GhSOh$*9x<@L~j1%6L=VbARpVzSBNU@GQ>z5Y)QrsW` z)u)ds>UIlpfLzCTo19iicCKgI911#`8(A#K4p%h%WL(6rtsWWGpk}S%V_A3=r%45g zcyYN&pJZB_%imzS!oIT~)z?UF@S`&SqdggdH06(gH|S*$9p+4tolS{>MXR<3nteLz zhh5h@(@*#EH4Z+xf1ir4ij5#CD(`74NzLzR8O~nM0aIy?_)<$s 
z0vtlTnFBZDG9W&@Ub|1myHr8R5oa3n>Xk{GXP_@1^T$kKA5M;k5d<#~H7L4z59Jpv zSY2FrK0F@-^h5Xq@_KzQI@yhd5z3ALZ$r`r8;=z~dWXmTv)CNa)9>roLN^(Z*rs(;qqOZH_+r^&HE9iYd@q$gsx!G`R4Qg=Q^J|hCbE!@XT!>H9?w4=u45mYNR`(;9 zYxdUAwDSeo{K{YFRvfG=%J4qqFN?imadjs8gDH}Cq@K+>5Qb3k!lve`J&J}^wv2^O zit{90C-lplz205ArQZzsQXL3Rblr|*o4@^}D(B4dv}phZ8r?)?0Fv&`3Fh~8L{2AG z*>&G1CpFylney*;OFi6be{SCBng%VR@+cjZUXk&_VvRqgy_FTO7l@Pa=+um*&gDUy z1ygYNO_m4FMc`VGHjeymsr509@4>&RIf9t)a z{Eb&$3nZKzwQxwCEM4n4%H+FUcw^MdsuKmw2bUnKRjRJsloM0+;XrCky- z{8=yRlSeO`58X-gmkY=eu|!O`s+X7BhjU_}#b9L}0ZjihzaB3?LL)xV(Z^{k>xLdS zeNA5F(^ao;6yFj->=|zNeM-E??Zf)7LBmu!YKr8WdR?1hwR+#U(8^rT`E{0+?}8l$ zUi3?`XEbtY`r;Cw^go8)(Bx@n2k{0d`nkJ*s9usci=b6ttjNK>;=$1*Hdk*PjP^>m zjXc^18vmdH1%wM~PbGr%svk@4)i$ELm=q?Q=y+J8>CgIO&MrDy0+bHE>`AtuFAly) zsJrVoblSY;^+%B^8s^WJ#S>aF1kxVYJQj3$q`aOAw{kUZf>?{dlH?qmPit&ff)MqP zj~TWF${8r_&j_s8(EVUdk~G_!X^Ausv=UP;ZPW0f3H`{0vSL^?SRa#7Q6J zD6BOvk4f1{cBn43jwY-=*oX5QO4%B|f=5y=?=WoNE(R!@eb0I|r?h8MO#}}YT?3cM zaehHb9)VARUH(};B=f;j&+#Y^V{2-B%*VtVX%$+OKo5#M3eYIS?lIS@SbP6@?AlQx0G zsT>eo<2tG3id?@;zB_P@#Y}$d5HhQn0R6|Z4Y8UX9ZAZ5gX9|{t6Z!AZehgRJG@V< zimXNq6989kSRU_-sxdFMLpIh*cn`4;RLT#tAOXH7Lmjler~I9^;Da`8qiB-E*o8FC z0)a0&T`7W*qdJ8?P^P(+{4{|Y*gEZ%eRyo7;dP@;V>#`1a;w+tw~mNT z8*;(8`B<)Z#^}sc(111>uT*|KIMvt>t?G~pzL9ec8*%*0mBu5vbgB1KPEgT?&lKC` zT@-jQ+Git2l_#yJIEV|m+piotk0A3fQ46>P9Z6|Y{E@%aH`W(sgW`XXH0T#o<4dhG%n9G4ltk7X zcMR+asVup`MQ#N^zp@Xo^==X`9*i z2eI$jZ-L_$UU*$HL_w$EG3!_JS29WfhPeECh&h*5lTzLxrN$YDqRA z-Z5Aze47e3=;sj55nMU)#w32(8BacHKNVI6Th#*pw6b(?&aAUTy<<-oZ!-zG8hBLu zMDqMfVxHd7k51RY*{rAw{FZfxH#p<&Xom19`b~)Kai#J-Dsd_zof~NKMUH%@zqdfO z5wvzA1K$aINpnqj&g=JFq+Yy^k$NP7bh*$y^uWz>YwcVh4jvUPz3%n%S*UlPu|7-K zO8<7nGqJ8E)BvSJH_DJ?QbE6!9`^G}k0kUH*QgK1&-cKbT3d)=OvUl$1bxNv>DS1nf5-n0$^Q&}cC$&Kjqez^%5ngg4oSN?YKB&$s zdQRbGIZ!?1{*xl(SF0)EOX=E;O58$(Wr1^AaKCn0)oVu~d)wU>UHAIW+zA~y8I(C_ zlZSGtpLEu3h4mhU0C#$Mkuc~3_}qGPj4PPbCsl-d+4qaG%G;&fpSEP{)ZCNyyMT7- zoD27boe2-xHK8=lT~wT~shI3a4*hmftx^idg&g#Q_Q*(oN{uL-D5q$w%$!_98MC&( 
z!SWos`TcbM0dsP?AkY=wNu6|1S!-@kXYqKA_o{Bvw(p4KBcRR7@UwDRFVCO7a3bMs z122^ok0W4f;wcBGdQ)jHR(v(OsJr<9IPE|81Oh80@4S2-Lh5sP0%&+W$O44mt&tVQ z0u!Z)$_W|3BM!f{L55tNq!wyCr;ROdB&H9%i7u;M)a5*datgc2!nq%JMaeqo+8??!+t*P)0fdg{Z3R&dvZ{X1N!|JKz!KU^e^&mPkLTbBexS} zjb-rNRo~FLdsvG#n0n+ChBqfVzjWoK+(LOi+b+sR2URx<#CKgyS;ZfeFwlh^ZGcyH z+w?~{Y&JLVgMYc|;xJ`^kp=u1yxR#U4rYo6q29EAs}v}G(`D(76Q!^lKFU8#64E5K zQ>bcvmG!=dsq2gIzCpSTU5js7y*HQ9oo~6tOjJX-``)+ot4X zyH7Ab$I;9f<>^SZZre_`=lCBCM2qL?tH*k+yMG>y2L*h*`U}~@scB3t+S5BK8QCX{ zuk<-ds(3D?IVqRxeGlZ4nVt}`g=L97-q7<{>hlTKO{UDy5;YP2q3Sd++=};(J^NeT z&K-;<9zj2or>EOkFBgN7m+P!4J=sMqPj_VS@+{gQ?Kvq6bLtQBC1`RENPX9(dQQ~# z0Bwsp9r6p^0|l3#nT2x!*#1WYa91=*E5C>y@4c=?PV!)~;;Ns-wALop6=o2@WesDU zW)05q)h^O`7U0yRpdF!f4I(MLQmp;~Du4p>8?%-70Nyf})mk z=sEKY_1Gb=zSN9f*Aa*aCH*b;!+-icRJu;DO;-Xx@o+0?*mU)BFNEA%wOf|j!{A!3 zGQ}4=(jlbZFSN%-YgJ}`%ahAPy`!7F13YaWA}zs1pcpox2-cAG<&@}Hlbyb=Hr_ieVgLg{DI&P7)w+AUKiMRJ+h!(Z3h zb;VV6V;hdYM7~xz^Ue!5^eV54rT#Lx4A_`3R1lyu60Hj8OnDZ5c(3rI6u++eiAP$x zeX^E4a(2R|Zz;cABV3*;eics;3Szfa>sAxqa?`yJFZ`>p@z{zYhtdFY&RzJtSLKH+lwwJ;?c>CN zT>&xqGRlq=dDEzJV54-<_bZClSWVo(FHZ0;Qj5TrXN`WMSXx@1L*rvwXTAbzo=-2y zM=eCcrSyXBhEVpD+zQ+tXw$7YR{ZF*{ZyVBfr6*}2)ye=M|2cUn=Qy@708XiTH7eD zc*X!9sYkAKs1iGbh?VV+zhOx;Cs>$Lz#|o}s`u`h{Di*l@UFG_5%?^HDwdo{T3~l~ zn&)mQ`5C=#pBx%^wGN18q5-RDganPrKu^5&?NH6?Y0=}nrj|bg%Lpi|7 zjpN`%(sALrmODnm@cfT^(--lt*8VLS@Lpx33Dd)w!63a*W$$S#c~F}oUw-!V{kXxr zmI&+@gB*PGGX`6O$7VCa+22dUcrw_qr6+}t0w*$-$NQBgsUhtGrp#x=2ucm%xlr$@ z1yh~elaNt>oFrYO1$biFGfP1BiKw~Z6r%96XZ__C%0WFScL%37-`iKO`SC$$KK5Yr zi!w~+MTeQ~=p-Q*bLpq7GH?1uLEwq$*K@rDlDtEj*ZWj`WcpRh#=3jD{m|4=>v~uC zXdD~n!2upl{928#uN_bjccLoLx$n z%LNNI#Eu)%GT@QQKd5_H3ci(;16V^#eVjm7=p-T7#d#EZOfnwG`nr#4+#|l)Z~^+1 z!fV^iOFF|4Xwq;5Sfzp!)8|$1>Me>asgHXN2K(5MTYD5$9?-9-UTTgYBC+ekk$vK3k(SiDV3p50m zNfA2j&^D#4v|HJg>1$^`y`w{90bl3|xnW8Q_ZS?lefOgU*R(Af?b9a4a#Nc2e^g^S zqO#YUhKsQls^#f~thq%PnE*BJs|&uJ(?TO|<&owICF&k{ZQ)He!3AgP)7wmH-cSk( z`5&E)Wc_I8>{No#?N>GpII;^0<5a}RKE$4OFTa?>oD6!QOO|VRrIsHtdgt>i9D@GK 
zgz8Nr!DVP?jB?1Q*)HUW#n9L(M4p+^>_m__uwWvmx=+bS3vwAr>E3Ivqi40Qz04aa z=rU6A-TIzTx{X9}nOI&~wy>_;%^w}tjPjj_yX?^c)aIkF&h+}6&?+-d(pe9aT~G-3;)OkS%|DbmwK zM+-l>&3ktFoB9*r(AtlR7UKFeZvs2uv0o&9;H!&{5p-u%&Cv!o_*ISa)>uTpLcaqs z9UMc;Ibh{4X(8DQh1+mtuFF(?9s$hxYziRx#*SiR)o4IIgS>aH#*uN zI^OH}@r>h)N9#KXsFO1DM~TSx*_ogay}hhQsEIV4jPM^6#WxQ=6Yu~o_@Ys~$UnjP zRT2H|Z+_l0;BH=|=89Z&qGac-mORkXxNcmL&;CjThm*Lx`m0tn{tO*rR@R^jGyJ9# z9YX4))+2O$9NxgL%>j$c}LiWFEpyuQwv zmYfSk37%Wk#vYh|+mr$S#6C!p1t%QYDxjl%rhEb3=I4Y4RFX7d^zoY16lOJMg7KVK zk~z87yF)4fo(WSThuECokrP)_rr9)r_am2*`c% zptSo^7x$~lVvY83^!e=mhhakx&64y^dY0d)r6yS`F1SvGV7}$0J0~Tr=8bP$u)^6~ z(aknGDUiEs?IDD%!G@K%EghGTM9<){#N0Bk39WBR!U0~@^5qY=YWk}P3#Ik%TZeO* z%0lI?cPXTwz2tM?ylFujKA2ZqW>GsP`;NS`YFqUWA^Gy*X=4|^Xc3;Rx{@z)chR&U zzq7jSRs*viDuzFY$!@Z_NWPW6tj%4T_+o{cJ}@`BnvpOC-KbV)`SK=_V5AxE&|oD}#g`Quz}16!q-`7LUL`Rnn>S_Hk)u9GtfC3t_8 zAFKBM%thdRYjjShQI2K3SRc&9uIp_dIKmECXhvoQC_;;7<~xnn)owzihtK*uo#@HA z%q~R)7VBgTIik{`5E~2j?jb-Mk%>*X1Erie3bd^B&vm>bZs;x^Ri=8+C-ZAqtynx2 zy{S&Oi#K$dUEF#$C1bG1Fc%eE+?`me;p1Tk#8yvOB;K(=@l`-p3&_>1>mzHouq#0u z#(AC`LKc!x%KPhG6O$Q^<5Nx@U`$y!X{X@3OKcNPJK6ug%X@y8a=0T@8t4TzRl#ZH zc5_P6BpL5$e~ReExMgAGr!1$;m7axNs!Lxr8zZGCPF>RO5#&%0vOBzRa!c!m(-F0e zrhDdlzqT$mYmiE}Rd1(v52}`bwydkl_X#-My0uj3vI2h;UUXF;AM55A?l~f^hCH|p z@BIl)rN6xw>5nunpL*p!4TY!i0~v6U%9rKIf~irL2M(W+&Jiw|<_|d7^ryqK0B+iz z&Lf)-?*{JU9HVb&Cv?RLWyt3@Z2rjWt1IZPUj3i{0)S+@PS)h#UzOAZRtp=VTG#4b zrg;jr>Jp?}M>=FFHExJ-PMwTJ3)=NCG{7TbINV526O6OOIYny=oJ#Zb>}$jaoOk!n z@{UkOl%eCM#?7l2?~?<#NEPo5vfIYWsWgK86HZADW*R-pv&Tc^`%u!EC9|U!Ps*%1 zM*(dt>)A=O#YZW!9wtMsFzhQ2Q@yx`M&0W50F}3$XdkG138c3Sk4V`NJ3EdER7X&h zO8Y)y)WrU+F;2a#xpG~;^O$?wHmLT!w|y??{keGBMsJPD2uh-PzDAcfqkn^|_PT}C zLb%NPj#wyf>@mjAx@Fq`$#Y-M)Z&kE;}dX5q6oE_*w< zCA}*IB^hhPrb7ZMMt7mSgt3)ANyhL+(NM%af;A(rCt}Vb-x?O*1qOaoL^PZA5A# z>q}TJy}S=Sg=;%x&vwwnv^G}SMXrNc9KkQ|opaO}f6D{SLF9Z@Y+8Dk+QQeHlCK+& z=^x@3zQP^%{w#FzfCDQ>x^D4LEpcR8O*%(T}?DqJk>Ccjuc#ZI0=Ed&cL^z#R z+$BY9blhqGgNgO(J~^OXJ?6Np(`gKAWZN~rCE>ZO?M`+9=D4QWlwJTeJ_@-e;|Vig 
z?A7LSVvSTAj>(-5qi0!HQ#!JuiU5h80^Y<8Rf4f=b+2nnzsC^g+Z1^&y;y&%L5oa~o4m%PS08WHV72OQE*c*+eQkIe?qD1#wFT@Cl&{i+S)l zpwXnDZZhkAIg^Zm)s1$WNJSwkha}y5A5W@Y4q#npGqt+xr6|Aj^xp^?_Z$^#968{q z%x4Ts&@C6M0^7`wDs1tsSs_jG_ST;Bx*HN3=VT?-hgi(#gcA;uWoL^TPFrTG1krdm zYlm37j9cG8mtMHkue8(jqUKm1Hytr97w0KBvH|>E|^;}x1}~Mwb}=VL7zP_T&a|A5~rR(ZPa9drZQpLD91j44w7}A zPR2wLapDw*q{i&oFG+r+fP;m^u5Yit3-nBQ%}ru1fHojG*`sC1UpJj53ce1Nn5#*e zN%?jSMM*Wtd49XOmMmv>?&*L~ragB}qP1VE(ngJuUi2Z|b|>{wr5Qn6*Cug;*(t_F zo}L=kRU2_aMq`{?7&Q_>@#;79(}g`5&xD)0eN_Q@#og(O5L-;DY~NlA?pv!xf4|~w z?VXEuWiDyXNAixr*_?5%lZ%`Xi$jCfh?x9zf1TnHP4(o-$^1lxZATCUlwVg@c|0x2 zvK%iC(mPh%EkAPm$e0PvrE;GmxC>(po4o({cmftPBPK3(?Oh91%PsZu#qx692jYmE zkGkUfqdw9PAXWpa6~n8hbSHD#S`!@#;9*x&fw;hTrw|hEC$FZEXdKdCC8yr^|Dtf% zaS=n3t6umT$`f1v9(j`*f$6b@dFO@~n^dQlnLaQn1>V4w=G&UwFEXo9LjjMe|J!@W~vJBwT)Zigljca1R6*cYm=KBfXnIQGl3$`SNV#J~0x(&GVO#rl1 zx#pts)9u;I?6N@$iuK1^)?Xj&1}^(ejp|A|+S16#vGu_pe<90xKJ2;&I9G9eK60vz z{H>YtJO6bX;f;h)*8N@g;ozS%;zeL zEw%vt+NT@t)<8@N0C}CW*usW$1d^#8Gs5Hrm-_C~w1s4F8NvRu(lf$J(9!%cnb%vX z7*>};ooZs4jAamVjoiA3&3E+eO_7QZ!PGqWd1I))xY8}|l(~}Xfy(QE;jaI%8b?d< zdF`ehoLTKpWVBxK)FJX8$}M=~r=X~E3SYkPV!AvB#bsHeHQ*@YJhV#q-%D8894F>n zM;%0ct7fb3N*ut7=O@^$wTZDjG896wzsFK(l^c25u-k$^M|G)8{klX@xxjWE1E}Ws zq_pFV5rG#o4W)!g?X7b7`spv%6wYL$wK#`M1VDYq2KMDU!|J)ykyBV0TRKE99fqY?-)OX*zKFP3EfY{wX@S0fUf)TX^`ABUb@ zf0%!7<&vh`?I>lWjPM4O`|IwShqzpOZABlOc2|l_`NVK>qv8X%9I2!ArG{g)&Nv8d zLM2R?82y4%>0@>yNj*ArrP1#PVVxA082~O{k(A& zbxj)1=3ub^1$X18Cy2p$OXemnYX&i@>({%i%FRR8+>l4N*2&>$P8Si3I_7L{{D)h=1oyV^0_2nn|SxKI2EvHrIvcSE(Ao=}q3H5|TwdfHbv z&(m3;-P$8W^4p4Zr)iC&4*SceOJ|d0)^1Qsr5e4D=z$PTIL__U%1El2P@>e(8pz{W znQrVtQ)7m0?zn|1$lelbC?O8CZ8F<3?5li{lm13Y$RIyu1%3c8ZM_-7w8{r6$r*lm3a>la^d_lge6DUdPMeQqY5}-cuqPBXk9GUyDbJjbvz-qMW8M=@a%JR|Ktvdl}JJ+U*8LAh} z0;@S?YUf+5>|%_yNxO};IO&eZxb$VDtX(O1kd7iir-$+g7d9=6!C4{CNX*?^-AJ6` zWodS)&70?O33o5HUjOn<8ANj1kOSYw(HECaV)|dKpmKAcw=z9L`%W(D*1B*T|^_ip45*DGufZzU0D`5?(zV*a4LEbSck*gV56V+w;nY0?(FFokK_? 
z-QC?W#8B_0-p2hs_rGtw|K7D`xn^;VGiU65?mG5y?jMM+`zcj|r*%)yRJCum@> zB-p0!znd;kxe%ltf{hAxyua~~&=sY5Ok0;OqDaN~X1@LzLT`LoNeD9>7jnvIglcEp z*ZIn`8aX~Hn??LN7BBU!HiQiwo0PPCa>cl%%LaVO801CuSe5e1H>*r^C%Ze z-?L}SC4SpDG;E};noR$50wY$&mo!R>r%fosBtGh~%kl}5as4qaJy@?XgK*?W7Zh}d;3WmDZ zx$G9y{v843NuAXOE0?K^(g>1~k)JYpG(O*Z+y!Z@?3^pmB*-U=$QA!Q?If{F`!!0| zk|x9Y8P;2VBoyb2;efB9YRLd}Y0AAweXM2gFf%vfanPrUQm|&dw*s-d2a+>%l=VB! ziW4kTW|H?}MHFJTot-+agTEI(T27=RAoL47`U9&{s!1a2{z;)qTkNu8ab3OiWSFVc zc2H>Y_Dp3GVim(S^*UXk1YM!joR=Eh;;S&LQ4jfc*esc7a0WH#xIl6?^alCGmjR#} zJ6T&$FnbJj(46!BjUAk>4*F@CxIh+!@3uwHsJFBZG*3GY)PObAOu(CPFfNVPS%$YK z!*sYnb(Hi8%X>p!q|krkxNfsONtD+dg^P)gRx`ek|sm9S1joxbTtO zcTfj%imnT?Eat4Xcl$Qyo%ZhyMp0_(X(i=vND~$;Oh`}}&fUYUUw`|jEcs*hB**aR zmDLS_@dF~wwk3wxRsHq&qCJQts7Xx#u4{F$BQd#mxYj*6gF=>}L0GQ>k# zPKPl8oD9WiJ{8T@^CAFCLb*&aK=s?Z_<`?rw?Td}%9QYt04RjzJWIYU@?bC}F0Te} zX(93cZhiTQGx8B5vFC}%`H#jH06CD02ykshaEtK&4`vrwf+e7h4+{q;;=i6Lhycxa zxii!--`|zEKOHEjQFq*ns;jdD{^MCN4IDf~VO3`EPh-;`#Ik!e(Er!gCi1`CzCg30 z5*pA|NMrBk@*jPUh^-U=e;xw^&7aD^A9!!NA21vx0G#r_&F4S``g=d3rKLRXpHcjN zN1_Z2C!;$dHR)fF+ka+d`Ca7v{k#9`Sq>R69OXUC>;K@pLiPFH$f%{I^cN)dx4Hqn zUvRoPR#_WV|J7HZ1+3lDQY!O*J^O!K`G0FGgW1ctVrg-hD!!FwX#ZOee=6e!oN!hu zzWlx4g7bSj1xOGO01;Y{P6G~(KML~e-5~H#ggy07dGn9L2p}T^s@WVT8uef5mkMy? 
z`I!qp`mfPoVgh>diTw-1f9ZlS4xrPsK6w8%8tUhO&>~piY5$jC`Y-?wYth*LfEWL~ zQ;-7$8hMVj_P-P!RU3F{^X$WaOdl~l8PH73dSjda)V^AQxeev)UwK_$%=5E(SByyl zvp<~`E2!>TK(j(HtwdrEFlnB%Pl-Q%CcXGhb3P`-(T4EXmWZRfJ3tiC$^CaT(a!yP zeKmeAzy#c(Gp_r?U!MX_nQ!QbFrC)|bV~6}s@V^metwJtPEeOJ9@+G-IRqQPxy-)_ zTG4yzUPONBHbnS+E8KUm4lYxJ8+ey?{#sxIsitefGjWtpXy@G%M&4;ZHq^?^?b2W1 zaWw(MU#wH^0(Z~s=bw=xiZ0L|qzT;puIBnqc=whDyaqBKL!1aO0WR`@dNm zc+dQOU6Xv)1YqsGHn|_3m``nukCML&u(b@5*4F@*rZz-sk3r+_cgK59+ob z5)oTmt1*zXOa-jgkNkn{cjCDRz$cY^yUgRfH5(?tHvt#<5ET`j5bc{W(w+jIj3@K@ zk3Ppvo4d!7JaZUrPFnTQ3jR$_%DL@izTfAvzz4+Ez>ge9JR`@qf%UVQpX*%rhx#bt}BBKJSe*)?$R$UfaHA-i1Zf^t`R9$}CXZ zdZ$eI*CI00q4qNliQ9hezB;--IuZJGloo;OA>I+k^;fShd80roWZdzdtjligwnNYN z&(1oEs!bpFj9cy3d}DIi51Bg5OPX{9QbU^2KLc8HGSq#>>Etk2MOX+AZ_xbV+Eqxm zRKk0+>g|#*zwHOmI$AH7nY{mH4>ZRmXy|e=c%-B}Z9i@;>D0f=aBMF2p+l3s>5+EKl2{Uu0Kzt?(RsPXO0 zYDguEm1(*i^`CA|@c=jW;Pira7-1hcf1W9GMjKl)G3PC8X?EP%K2evqcQ}3hbc|OE z9X7AF?MW1I&I_mO%bp?f<0yegu6E54`|vV5Q5V9_n07hEXn`;2xQ2JN703KnNlQVM zq+$M;H5$_D%YU(ca?(JSxAUBRTURyK_iAC>8xO&T!|f}mBOxbDHu!o~_T zy+=uxi<@NmugL+Vp~*ffInNOvmg_Y%%K2natk1K`Y+B`Y1kLJBnY+L{BnQ{#%LJmLcUCF^Q2eTu=Cd>j-1dPQQOJk{#&7@_ zB*gu5q8&q>df7X82*IP5q4G_a{+x+md+mKYpvx~G?W-e2BeXk#y9(Nk1;qg~!~P0< z+~c1m&#ONn92zH`bX`a$;5iR!D7s613?75evYEHgWScP*yv)gtA6UVanT+&~%VFBe zjb&T*s@IQA6b9_I?IB}q^Iv8=V$2$kl6lUUO2!U}!m_Ukc$Z87bla^p*QK%6!PAwyN7rZb z06l?Fiikhyt&?>*e^U#9wKVK@CV+0`s4Ds( z?!D^lUZ6ut-6fvsl}dpQ1wcbs4>TO9`C-%30Hnif1k>5_X~CE4nDC}_+O*RI*LLNkes1L^ek9L6{@QVf+7qZ_V(xx+F$ajWOgSTuiMDo3gmtIYkJgh; zuSbltAK|eEGaGhu*nWCBGmg8GcDn(-xxgvaZ#xQp2k=r_1C=omg+fzY&WmtGp>CUw zSvwxDOkUo3_IYO$UB!+-LR@WZN+~a72UqHj)FnR3F*Cwh<#h38wb-o3za4=)a1x|) z$pUI zz^qEPHxOAKIe)o5QFOfh{3L!E`wEtaHA1tnv)$gq*hrQXfM>S8X<25}Ytv07n@9%U zgG|Fv_pOe;iE^750?1 z!)YCVTJh%5P-$ZHE9X|z>dy*?TIFXPXD&R`ndYS1x74)n!EZ?Z1Rf;8esD0?g|)dl z-BS%lX}pc0{n|k|ojI;O6Z@!|<1kxpF;ApRI--|V^W7bm4 zem*%$WyEAS#C;F#{rC1FSgTxPa@3g9E_B>*r88viRs3}jA8dksBLi!1YqkSVJL2V$ 
z?D1I9Jju9b5-;%4$nDRFg$gYRK*90Cnl|#ea7y70i0;|-J%X`=M03wPh{xBQCTc8iVdB>|^yIe-Ppy-3)SsniuXWHwEpU|Q558lREK0QZJMASWEGf5`U-YS3<2@PpN_wUd-?be9IH{OvDZ!Q|snF*%Tx;>Mjn;CtCiy3O2B!Hx>YkJIw!S*9n zgKX^lG&DxiBjw`THrvMY8cmBD5kD;2H9ofK+x|JnCNcT)H&+y-DH4n+>=}qd+~QG> zU!&D2n6lm#6h;-Krcss`6v_Df+!@&q_)EHY$eh!DgQtgw#T-&u%`cI2uzlQ+{g z5ZszHN=jKzbPAln2|?cBHyM-V&mjlDSO8PM%nkss!1BYHy$>zl0jBWesHX%jIJZL; zJM+VMVTKV>hjp{nbo|s*rCu`}Ou#f_S+RwKP7}LsHn8eCqJgoTUZZ?s!yCxzCh_$i zT(;dbIP~r#^}Kwnm??v$KdkAM5j`j;yfv2d>TP=fTB;C;cY2w_qGsf){k$~E>TsUR zHOR^wgGxG7S#3pMlEP5A%K1W9R8eR7TMWxXehs>l3an2{kKc%_nitQpV9$9y#HEX4 zH=PjBIAt>%6Uw}BRI4&CsAjD2>@woG=5eZIk^&Cg4VGq6Pnf=@LGP5UQmD%%m`@qC zB}024DXgyrIk2D;+sEkhJ^y6D6F}26VVPvhaI|)1tBkYT91;nT?{YA=-w&)fpMsdx zVOA(n?bOMbDH`6|Tp7=hi$0&FsjS|cYtWnvJ3q=Htz=p2d~?duels>9mWT{7=!CNb z5a-g+b;Zezsv=TYW~^LJ9k~loA#^6JzYN{9b1&B6rhz(-V9YW13=OX6Ql5unm^OvHJMp0 zBN4~S@kP#ed=g~EM~PJbUSm{5VYsWnp8Z-_U6F>jc49@qaBiY@OdgN&DM_Kk)RN>~ zL)vWnw$nPDhPgdU+s<?>NUsv64AVgtwJX$MJNl6v)$8&V@Xr!-vx?Bs=L!%K3+yO=MNGSB{HIFci z6livk#?hZE4bWq=TLfb9J2Dt{M=Fk*7IhaTKxAakC+t3Sja;63y&)xCDq$lRI-=0K zCWD51%*`_Sbq%_C zaj0HYwaZJ$(xvIVDR3VC6}bi7t17t{<0g253SHA6r%68yhH!I!h(R-5upQA8WwxDD z;9u!UR#tIPlJUtZ{$^a4Rw?oXBcn3>kUFCFj=qu=T8+!Kml<_l{hSxjLm9IcMjv+} zEH&E5AV6K{3`-%(*W9emmrH2@MQ|)r$;7j6O(c*QV``9r>If!#HDOAl`Rb+F+4X?S zJQlttoU4@b*m27h{V_shz=EFGqbc=iK=w5>>ST_(VhB!bK`kA&`%D?2`MDP!8>^4a zMh1~KbeljBB30XwQDxk52`oQF0$Wj^&vy{re!a*E;jRkv=KKp(!4ZYsCm;e<+k%Um zP`fL$Y29~gt2|GyOc$$s3qm;nKI<4{oR=unJ178fqmlJO0d~2sJd~tTqF;i|w}S~j zKakb#dk)wqhMokhmu6GpUO`W({U?vMHwK^S$1RhkfdJfA0Ea)8X-*o!S{Y+HGFCVK zs$B>m29t)D9^^R zgSJkLjYbKMM&qm9=!YFHpZlkSI0}6~#3%7Lo?%;qZWCK2Bk2T9H``J|4zy2~{26Oa z6OSQ^x93NelNZ-50&h3^V&yt+5nG|2xlwu5wOe`ej(vs2=R?M%kN*;t;BdrjduQn2IN{2qo?y6|4y-5?^~c2r*-w7~TT>N1(j)FL7x zaru!Ge!idgEPUykCu-_#P4G2!OFUmYi!c&VSpR`o(Ak!KW@tvVj8yP4v5 zpnlr%>FKR*bF*M;U`N4XX))hhc^W37(k-@Jj3kn-H@j06OPI~Y}-Ke_=7Lvb)aTKU|Ox1v}Oc6@|kza<-SEH!vA(6O>{b@5bim%%|XKd^Y0s5lMFCPMV`UoP>^918W z7#Hv2+bVL3=XuNx7JbL|ULWSFVL0y95T1fETa|eR-PIav>3~~%jqy*cf5Z!rGxGa) 
zUB*}K^<@iT^&^+%i5-@sS#0y_Q^T-6m?agzgoQ0YK$F?5w^eS;svK|Sa0+2$xM@ou zv^v`y?#TX%W+<3ZiIdwRpC*bDLN~^bk<>R{?}$Qkp+-%kok<78Fe=K>^Ql^8$+SoP zik=#5#P$b@88Y@OTyOa<;~x`prRVl+vkpS^GrCX2LsFHh%y}1!Zv%2>#(k|~U2_^$ zuU|FZaE(9xM4ttN;8?e$chkwIdiW{SSQ&odNGj#D-C}iY(H9nm?@i#2f?R!wm*10V z{V;fTNR@={??!WVI&C5J0v6$rcqOm-0tJi40G;jfbXTm(VqT^9J#=f-YoIj-z~EMv zvfxq53F8=!BMX{CI1Ck@DA=H9$zapoJmsE)*y}*&ws&&JOzBlGoRtkhi9|tp zT19cehvX)QZ38;ahX^ob#OE~5tPY5WS-~GGUG}ax!!mHj-=L%eOy6Bwrh_c+7ao-I z85NVXeG*FGvh@)u0&v++A8$;8GC^!&>pP{-*l&zUWCC`^Z^7QNvb@mnS94*qd=fyg z&K%#y{(izo$9WL}8KsAGt?EnUEDmS&8zM`*l&Q6~fsi0({WX^1j{~=XsOU!DqDiYF zO3Pekr?z(!0pJ1bq`k8N1Wjc#8OJ?aOl$s(u-`0NlZA$D#CN^&7F47gFj1*YAU9BL ziD8-h&TLWY_Ku~7=hej#Zs2;&tlQZo{+U}9w@QwNw8rNIe)~i z={VLDgfrQjox{`V@{faLZ!*+E0oGz+AWbnGJ;$~&v zC30fyrbv)``#||qa=1{pGDdl$i+b?}skVY~HxGA-zYkA00UpGIy5fXY9Q%y!omyL! zHM;r2pw@4aL)f~2H4PK;!FrB@Pp7i!Ud05cdY8e)aW`!d>gAwt&8T*v6z8{yKzHRaxeR1`9?=bx!%W$Wh~lM-!c=PJ!Kw`RA^ z(ir;Ps}QzQamp^X@)%d2ROg<5W9gSmvIpg*JM%f9WHEpaS(t6NFabY0N`7BZSS_58 z?E~g$4&k$*BLZxC9jUa|F&%6W)CeF~ZQAvwJ> zlj)6!YuA#Qu}mqK>N;q)u2F;-%;7U3)QLxg(RbK^bT`9( zUuTRgXjo;t5W6=n1|Fs1t5%IQN?xw8zjPc6IvJ8=RIjt9oz0NM!Mhst zrnC&kKWcas_QVzNOb48*P2@lbuqUkQM7DBC}c zs5y@=`!c<3{~a^^c;iZ%+=#M$*czLJ$CA<^NqV38MyXJQuzfT^-8z2zNUUu#38Q23 z33dm|yY97tj?aAy7m56?g69of(kUq^EmlL5oay7 z$L<{F+iZ`~Z+7@!>l>m@BU*cS#t_7IlubLZFJHuCQ_*;Ndn;nU>5AkP3*=b;+K=)e z@41aae7ahm4551a?bY*2ITh@d=sLmKG9XbY-~JHydDR4LQ)XcL5Sy>^V5sVS?XZ~s zm~TJ;CS<#B2x!)96-3EttsIXJr<9K6$Vx66#^7^qoUkXjH>B-ADXp4pX~Ut#15Y>b<{x z=36Qfg1!e2vXXea_Qacjb@6T!*9T@f{GC-Owv=f@>8DGIDBg zFsgdea4vm^^1yO@7Ob*;cPQQn=SHo3hGE)Rr>wn1ltf~xBP!y~*S?TvbysRVORtZ6 z4SOh8QF#ktzRHO&%`}P%f}J^46N8|7cZ{xOllT&DVB3ruq%t-JCPg97#G+kDT;kb! 
zMp$+E%95xRQ>4itXLOKV)6C%K>R0@Pe1s#zsPnd^0=VN2hr0kGHkl8xT2cU~N(n!r z!#Hr;Sh+)Wd}G7O=DMA**5&wd#({Fy#)H4jfiWuTeyY6v1qOY9IN4^Xnlj8L=lS)k z9FKwU7ZyQVv#P!d@s;h+dV%{1`5r|A5P1p9J?9ome;tAe_E8|0x3UPA|9IQ`!AH)d zk6oHPKnWT?6z49~Ghf9_TJu=B2AaEds2q_~ohXC2op-=3Z*y3BK-oJYW`^Vj0F8`Bi-G6{3(W=l5 z&_F_dnc+4&Q4_sJ-6ZFu95#Go)I075G6LYkc2B`kMb*N7OOsenYF%%W@%@1c{6&QW zWaP)GQn(6|%{}AG$^3tf(!GhINk#i8C~fIU`sQ;bM7=?q)hRQ-K)*;=f!=7oit?MI zP2>x(noTx54b(h8i?i@(882=-r_yp%ddCV;*H@Zg85e%y%_VS8(FTxgN~KoJL`$m7 zfs2ZGE-#d5H>tx>SG9+x0{w{aJttxWzXM6x&9|^WvX~9ToW_4ET*PWD z)YkJldmi$zx{PS#?yn3Aa2kjcz>$&)hSN)G95UbvR#}J*RBSIU*)Ng2j-za)!T|gM z?ewAHEwCEnx}Y*dmNK|ALE!wg!NSu%DK~)o3;^@5uAlBsFWqr3YhiNY>Q3a1+k7xM zgG>#qTDox4G^70J9B33bhdClE%TUYt;*xm=ZEZ8Ea3(ZThYn;Yp41&Ro5uL}qLItE zc~U_icP-&GYTif4NE^v;oVRh(K;>V(6qOq!81j1)HCZG7SDB)zk1q+Vm z{+a^xd{Z@1pz;NWsMIn^N?pjc}%#7hM8lFW$&NHHcjJ? zqWH`EApXn6td?-Ie>CHZn&s|q7R2t^&!~&2yG5vmq%y`T`{g)))lypB0z*~1fd{gC z!l%{tSqm%m;XgV}E2 zN^n~!BcLxP=(!M`pkw`j2B=p5@|94_m=0+RCw0;ca3Rm%7IF?%uj7o`bDq(qL&;SS zn#q4LW`b5<&0lA4P-m&cyA4=O{uROVBkwvl&Y!{DdAu8pY%P$pFiSbH)28#xj;zY# z$^FNq)TAKC9`xP6oko2`53g2-+#}C}43>!f&X@bGV`oO&b>(y3jUB+CF9}<&e$JSO zEZLQM+j6fJhET{Ad11P5cpSy3+%aq7!Qm47>Un;{jEEQL?_pb_%I!xw`-E0CkJc5< zUY6bj=65tCi0M~&A4I67Gd=#XZEn->THusgnlk*RLWp*v_VoEm4y2@)iSK0K{$K0d z6zB%3QreHTv#swf+n#3VALS_As-v+U&8jj)h165{oc{b}B$=iJMu~<%B73fyOUhZT zsM}*@y?GRZPoI?ND8IP=J#hgN2>eK02IKsTRn6k5wTfD{TJ0j_CjyS(8pUX;pX+jnLt*qh zU$=9td~gqf5KW`1nnr;3mzn{ZzXU(R{X*Aprpr!DS$44)5Zl^z<-#qBF{~C<{g8H! 
zf0jhFX*4P?k2x{;Xul1*lpCjqBGB>oK9K~A!=Ie#KpA#S->dF-6n4i$&q$6)op}+r zo?`si1CnOpOo0oy_GkZ=OLtL{l(`CwT-`O4McRQx=5IJZKHc5nV(cHYabFQ>8fH{g*TrZKH?D%|pOrVRh~Ve_I1vAtt~fIM ziTxbF^(Q_~Of9a}=46Fj(&V)C^$pHxVBa!klQwYcX_yd%Dc)KQFjhq*y&$u`Zp_}qWGaah(8cw-&ow_K#6g547!ur|@f`2p;Q|~Q;)(qS zjw;#Y2lr+_{Sc<}2Y2lu*)@T%^N#$6xVH8$amKq-uEszW*XCx4g3JCie&+?9(bLt= z$S<#>7({@++!TQS!-*YL1& z`uqmoyyKzcpVbTwN6NN;L*Ztr(Uwp!oxp`OIHsOtx%vt~)S`C(R5ZB6JFc90jJ7%f zQaztz@3|OT-32a5$vT5Q2~EW|N`D>U2XKW+*nSvKheJ40P*aof12VtI>x=_{b7b7s z?Yhy=A0;7K{DwFPV2v~y%t~?=|9*4h^&;_KD-Q^eKs~*GlmW9aBXo}-3K1JIoWvQS zMZ1OI=P2UQ<#fAcXg3gq?nr)kb8!^G>d;AriH0TtU>H)?lPn|)H25TphtloTwq8BS zNE7lzmYc1u$nNLneSaFS3)pR!l(!v$iXEzTL&43&a$oS238zT!{UzBXxIoywhDkl6 z@uJcHvW~+G{0QLpQ~G(>-?@AHtk`ZBgm=)aM-3Xy)=~CR8{Y6-Pb$G`$S*TO%YStl zWjvNot(4--iTvbA7T-QZ7XRgyQ2}1Cu?rHn!vUgpwO->c3kgmHJY}z@mHqCVSe5y# z3|w9_`fpPN=YEF{0=LNUCh+_I1WO?~81zPbdrHU)3Ya542+?Bm*Pg9yz-5HPWqFs( z*hnCfCgv)5dud1n#nGh9lDderTJIw)x^W(Tf8obgZhfXxRo4?qfx&xgN}G0Lk74xM z-IHaiVI;CU0xT7I$O7%PAKoaOfTMIq=*rq)qLy{!n;)=cR%pv5HZ@(e{P&i1( zFdK?b2o7@pGS5x-s1A|~T1cFE55(?Z{M4`m3ilPV$dU{kO=C8bDUP|+sJ;gK{XLsq z*rUx$lZB>~lk2l(=#p^I{zQd0kigkGtzPfvH^{9s+Q0kkdOnhna?xZT^0}0bmAqB( zycWzIADF~J-Bad|@d8Lkyg&KuV*+=1N{o)RCl3%uKbPH$e;gs3$o1AoN2@bZcQjAg zCfobrA7MJ6xQnO@N%w{PTv_x00ewUbUnVccJHT~wLZ-{yQl2O{>1Xr%%l-_8#Kho< zq*FxNJnf}25R|L&@T_f``Sa(`+J!8dm2{a$u|cXS5w1lU$;l+S>S>`l-;|faQH|PN zxs_Fn=T?wFsV>s0qw<+(Pj7cIV?gJNpH5WPA8SZRv05Z*0fb^t6ZpW93RyBDhOeVO zW`F)YN_P*c;HTJXz^LldL$eMjDmjIPMdhnM!V}oC`=#z@EIfnE82+-vd?X7}21CbY z%Tu>S7bm#N-bGAF(OqqsZMXBvlh-g%jGr~(oS$;B+Y{`lCT-{2YXi=fIE(tN;}9LE z0>VeIk57T(H+dr(WNvpfhS@-Te^5h{cQP7ptVo2El$x5AbtJ7aJN^ay#G%>zoc2a{ zgh@ElTTr9DN8Ag{0os3dp4<%X%6!*?v~!*^JotDK|Ksr(VeQ8YO6^88nGiIg6F=-{ zu*EMD;*U4*ziU+}++D{ND+M(lZ<7fKn2M0wCBF3CNu-eGFD`&CDK-<>9KUh{$oBbL zuZKWk^)8oH>ve$_5yLb2+$_l`>{Qj%;8Ro7)9uy$EPlsIvXT40Jmv$jX35zIgSUrR zxVX~e<~|r69-S0Bb*=s%UwHa8!g+hZD|Uwq>eIrnll^q1$g%Gl=>~!z+FGlV+JJMe zx>;Rlx|y*>J_cLn?#op+F&Ku)j(oAeVVFjE@IXF+i_Lh>!gXNYwJ7D2yB?fTSt|JQ 
z$xtKy%bw7zSa!c*ebdW6-g3k3$F)|_+QI2qSN5UJG9yDS-9#RxEIYO|DUtKZ$$CR7 zv-z61zIlE@X4yi5P|eZq)fE*>ipb3o@VGZ zM9+68SEl#FlABk&&A|ZLRKpob$t_dm&GK%fGHnD7c(wGg@#`e?X{IwjJD%}jqdtkq z$u_&G*wMV!$*~bM^nWbRLKgfvq1!8~^S4T+s1S1`+9J)x5Fmy_XFQ2vPLKQ)mBi;7 zjs4Sy@`fM5sq*fHD}@1&r`7uH(Sk6&<08$fN=ygQlX8<(*YVo7G4~@#=b}h$4z4MJj8=zZnR9FYNC{}u#-8`eswJ=## z=l!dlI>ED`awad1aU_t%x7ihQI*2j? zcN1+LYAX6DmSo_2FKi(>#HU>q;0_|#ZV<&tgQQq@42!qC%U+Af@J!7DV4_8dibQZA zDn--cMjpYfpP~~E+Ju&>=4ElD^dFPR-8tzC)t|_16rh>v zz!cl~G3FJ&SRg?b5FKaTmmNuc_hlR34BA<`dW(2>SC-dbZDIK<_6-&t{>yA>Y5Y-^ zh-Y{Ii9WhQ?lg+QfBLv~5vS}n+w0>~XLAYJh@g~yZrBqF=$PB=E2$qY9}h)vxMwj6 z4xqD~nj&Wt(629~wAr$k=%PK(fMgvw}rN$ML+Mwm}`$|V3Ye7&v@CB5-_TxPIw~^-S*34e+hb3OqhFzh)rN}6>vMue> z(^9E8{b?T;;nKOU<{@v{uPyfAqGtDI8sW^RcekCyT+QJ=bgJ8Xq@Y0A8glqmP@O3)F79+Ud(QIU+S4K+3HpkqIh0D zIiL<-Yg$SHQlT(O9u1{Ju~Ds~_1DzTb6E@j5yoIGMEEUkyU@z>TBoQgb}vauM(hUW zblC{4M6vvTd}m+uYOB>;E%m8#V+>Qz`bS^BBE0Dh(-MMiVfpeuctRASk5v>`)iar} z)yW9vm%0g=K=yrAlKNyArN0vM@ia>(tG_;fZ;TP5Mp)*x6nJiryeI#**cKB-gt1`& zxOb4jm_CpNywLO}_GbAu090h>koY=+&OCh*7SL98aKJq@M6DS~_c+odx3m0Q;LNs?i@P=IJp$Im<$PSd$$qHD2Eg2lKqneVYC|KDYdD{p~NY>2r( z4#DY6+cZ8Y=%W9Xcg**r*sT#&@9s0XTPxt?rlyO2g$_?~)=Nr5Ln8<~D<3vyaZM>V zR^&E|m85un)T_AoGeq-mQEN#8T+#dYhR_t-_0DTu3hr0F4&!U~X(e+^YCjj`BD)GUC zwWi2-paq*A>@SJ%F$V#=aCZ~PiARG z3p0X==sgv|STczLmZ3#MTiq)9hgb~`C>c|tUDFp^<4#((Cbgvt?K0cS6O>oBoG8-x zla~!k^wF{}P2RI!TJ$`Aw&&j&S-}0poCxRfPJ+5si3Po(y8YG&Zmk{t=?l2DduY_! zTk>HW8QlsQuPNX=pN51KjE6_yN0sU2wDf6T`p}={yI#xir5GVp{fRs^|;_)g`a*GSiUrV;br zwd37As{4T$%sMtcCU5y6O*=SCz^jc%*KYL*PM?KA5~_+R%>f4do1xoJ>wQFAtwEa}GT={5_Vn_)M4`;7Qk zh(z)RkzC!M(ayA=6mv`m>3AjiC!EFofslZ&HvY?-D81!dC*5<<)u9L47`o|_?+6nL zS)$j$!AIG+R(1ZXKEkxVSDRYIcF9S84q_%oHY8-tEmXOAsOadIX9}wi`jo-4-4l#? 
z$S^JCL>KULFxN9&OR5&1OEQJ>&aWdy{hCMQVh#L|b>M!bEb3vSVI$yH>~px`ZIa(5 z{)HrX{dv7BYnQ`Bthn@^fza zq-kph+N+09fXp8>MCu~Jv8_r9hN)#8()7ofhz@J?h0&tCwEI{j1fnvLUp=hx^4(2z z*1KzA&-3vBaTh0Pm)j2iX2d~r+lNVa*!v!+Ez7C=cS2a)0Iu-1JgPJ7aP=b{$Py7* zp(L91=AUGWy8zWd$yZt{q_5ttl(JdJqo4`&_b1Ve2KKvC4GVeZVOspJKk6U)Cv+cg zdRc;P8WkXLp=^?(X?bB(Z?D0)F&Zl?Icg=(%feovs7?3<*qHa)y~c=VHtJ4>?tzuo ztrgBktgvL_eV~ky6pD2BRLzzY+xb=}?57la@rT^bKyp=myS`OCR)9eSx(bM`5rv&$ zqWHfw+r9PKcg=2nP*1YCV3o+Xn+iQVXo<@Fe!RMtOVr2rY&gf7+D z`=1|KuU;E^ z^7-xwqz}2LQoHV~iw-*l8M8AF#|f+PS*7&eOhwchqt*x7J4c&wqq*%GYNc#X;eJKE zfKIy|tlpj>viyQRK7jk`^$1gQS7DeK1$(gCa-K8XDfSRLK0jP)NV}3_w-1$uXja?Y zDtJF8E^AY(P;h*fBzs*}Cbwi}r5lhW-q@=vl@y34CK=U+EGzn!@!5`bp4Mb&DGs(D zrBuAJ6Ko+~cWoG;$TZ;PyUQY*s3jwI18!E(x9q};q`&o+nc)`+ZL18WAduBj8k5VC zP0Dy~Ih8?`BA0Ttbmw#QZWs=OR;uH16bI`%Ut=8W*Uln^nt0MH+!!69av@AK|k zJU~24oECJyK^9261dfxBTlkb{A@_uQ9%@^B&Z~0|UOcV1cxT8SN0XkA%O)IxBZRn) zh20_rIB+X8YCIp#6f`$C;t{e!TIPqB*=UB-+K5tEg#rvDlONrhMNBVN-$fCr%?M_c z=NYfeK%MJ*oyUwFKXIm%M)g5H@>|TtTETSr2Po*PkYCjv;FQd*9?(rZBIXT@eVHqU zfU<@)eZep&T7vy9rA3h*5ADqGQ4lsG8TgF^WfJ(+YEL9OkAi$}N1j?9SGr=lxZrGE zJ&lNvZS2x&k7|x+1#;-7r@!j^#n4r+q=I@!YsKxXSdNorY0=)T2;<{O!cu=%_$u+{ zCHaz3CPg56wZA2uDm9~Gdi$6zc+GG5L_!_y_Gjn`{LAwZ4i!RlPT%zVEOmkZFzyoX z^%z(DmDl4eR}V!Pj_#iL{+Ux0jneaNOYbyM7pVlUv*fR0fkT=W4X<>anG_K9O0w1+ zXWral&(XL7l$h8pH9e_G;(50jd z@`|ds?2~0hT|@w&WO&QB72`6(%;qJ{ph80GFq9;@pkm2AOe-bP7^An3&btwXeT^B15rGa)mbJSDIZiECsO{e2W$wv(Az= z(Nx`R@k9}Kx7w|Z-c7tUpAfn#xnF6zecoL*W}5yxZGRPhr;-b{`K7u1pA({q5DviA zWFpPG01YH-=`1aHs|9-v86Vj!hgcoAmP_7tv>|ycTG~ahnP9^}SL^!tTIN#?MvS(* zW3+vWH%mLL0<^5gN>PdCqcaAhHr?%upoFQ>jmb(`G!!N~clzY-d5`nhV0Zcx(3!*M z*PMs50DBbC8|l%(Rcx_D-S^^%(Cy(bemn{jZ)%-C8PdLaGkUXX-wc9vAVhIjO7YAkvY;m?}k!)rl|bAzrEADt!nel zLdb6u|DDo3r*_9|(QW2sW2et84anOw2w&EU;-~3*w@F}hi$c&JO&vFQDk#jL6Fh8C$4vm^&d z@x1oh3}mtl`;f3e0$$SF;5OH93dA?}g2Zu+(U_LTi?mZw4<9gBR<1qK1WXPV{Mi#N{FT|7D{;Qp)F2>;R@DDb1ZzBD}#0xp9R ztFzxvvBqH-z#{GRnO~kop7hZB8mS8?Vmgrik&7ziHNL9u_i%e-F}k9Xbyo{01V7Yw 
z(wi~q52e63To-&$`Ar{%(c+%J^PF0)BN0{5W3kqcdPn<}eymaD4mc^kLM*tetQ zT%zuOME?%_Rni>~#@j}*f1JcShz`QZp&Yis{D+U|T)q&qdvV!rlE%jpr}PQI<5RUV zs@|k5abr*YyKp6h?iN(!k?s%qN6AP8kboYM1PCsFq?KRT_QWDMONO^~tf=R1EuPem z{M;|&T!ZcgTs^+eGxq48d;w7KTl#+0-{!>O@<^Id5{E|e(_iP-kE+*||Mvmc5=TG! z$2y*4-NkNnn0~vbRkNW}9v9eh^#LI|fEKt@%MbjiOLlIiF!y(L7Wi4X&^yk~2rFh^ zf1L;Nq{SDTat(1X>&Z2riOC6B1bO9WCTIj}Lo6&XZvtKUI zuPo*FSjQchB(|=^G9P7+a=vb3NF=?kpyZ8KB;9zB2o-@lpnQXsX51_5R~INq?5LpM z8O~pE5~{cyU^~z;8Mz+Z{f-z1azJS^2~M5;Tcg`>mPpYo|Fax!BzG23g6{s$-~U;K zP@oV-D>0`+hy1Ti{Vk3Hclvh0Cl3jDL0YhTQ?&5~#GKCIKzp8e&GAYqSKWD=&GVeI&CjpQhztYzf5 zTyp{G`slB)rLF#1tAw*0MWg2&hA)8ptS}Ewkle-5>T~JX5r#T0`#m0djT0)srSa%X zw4SWYh$K@*V$VDP3+Q~1R! z>IdCZ)6#sWE#^wU*zHeAa#|Urc@r~8^~SMhKE!37I89-;y}x&3nq9d?vwv%4c^j0J z?BgR&Blm(o!lC?W1ht}HsZoDsZr3&tMYi@YfrhR@*Cl#rK<}ScB;fUl_f3z8lRG4C zk6qIyZhtv4;vggYbhOqhO+id6DC@KCk%U{PXJ`xEV&lAtW!v8PHd!{3axJ-Zd$(dY zmh8#@h(*&QKmupnFjXUJj&G9Dp0!V?#PNZ#p&N6aiQvjFW}LUF(;1SoT+S=mInD*= z6tS;LVnn_d-g1O}mokq4Y!j>T5Nk{nv*GKhQbX=um~o&y6_A2r{y*%!Wl&sgw=Ehh zSa5fD3l?00ySuwu5Dyr2M(8OaP|Lpp5yhFl26mO>dsqrV4_y&)>U5i1N(ZbcL8Oa=atdU^HGtq~7 z6u>MQ_xj)%GH+}~i~kTJdEWCE#YWuh$Pzrg^-E4;xBiNizCn-Mc5cRDbS{l#9i+hJ zF~o+;eqlU$uQ7Ki>d@~sT|8a=AOr>fqc9s0Gr&-9aUGDtbqPGabF znhpfW$%e(oKIH)CLo6GS$hP?M^d12yr^-)NzV|s;?LZ3t^eJVC!+yn#*QrcfK=ibA zGCB(O=OAulbTq$rHmw@HJwu$O`J0XVd4)`_mX~6_m&i9bk6@}LYNG5Y8`pBiC$vCo zkmX(?#OolKzJN?Z;G3j$TiTD$SVzVmTUVcR#syx&+wVL}%jUk-cnZP9Eqk>Zd(JTG zw-+M?bN;XG{V@a*INF2_W)1|NIS0r+w~;Z)0ddAJz@T8O4)`+bX>sA;+>*JSi2U$+ zdY$HL7*$nJBq-Hrbogyn?^McsETirn$AnF6t_YKzdi7SG%Gd8?)vbd5-YlcA^^9CT z_mMy4qIEiadw}dLLcUZYTeuIqe0}3j(F*b4X#{vqB))_(?BQe-%zEda5e(iMjcd%R z*pxxc`~(w>Iza1GfRtyAN(y>*5Wv@X<5ei{=V*VFx^TTOM|gSiumWAM8*sY6*jp~q{k=e9o)jngxp`M@Ka!8t9lD4ImCX%(tRuXVwiN#(|v zN1I*1<#od(w!k>iaQceI^V~_Z%nE~6zr0lO_+p#x;%?V`9M}7N#D`}cdh%)aK~Gzw za=U{G^Wy37NzO0rWq{Ne^awzCU5dR%pq8R$$Fe^xJT^r#U|we%*2$HZoM1#x=IypP z-yCt-ZLnQ=-1=;4ZZf9=-F#CsXT<$qP#fL+tmw%B{f6J>Qf(^{C1-;;j_b z*0UU+^_}mqUI69AF63IVD12$4=4*3RCaxj4re7_9{bP0}HzJtRwCZA3zho~MYvEwj 
zR7arcq^M9!K|vvs@?hAb`c>zb*+qNB5;cZ5m@Jb6F93Zl+4s@0{wOgxDz=+NdR4B; z^IGfR^5m7~%H?r8o9P8431+|Di}J=K(9Opr6TsjqJlrr1mwnFY3K*y~B(t2q!1$y= zLg#shWDozi6|boDb*o&19+&M~_8e-8-p$C2qj3i)O{38$OW@^E;C?OeHd4{;XpTap z#$U|yXqT+Fz86mHJ`{Q6{>md;E{j`M0A`(zo_?e&KvH$5t_Fr%LqnA2XXNf}$IEEm zcn}6!2w=Mv(%;E*Ee{QVFV~FMe&JX>vY6tc=r^m{>+~1?fMAn3Q@#vurE$^6I-r(f zV>{^I*DNnUKw7Vk8E1Kt-SASqH@JRS(Gjm5=OOI~5bU?iXUqAN9Cj`MEHylQ(+)m6 zR6R8wVALHe4rWWp=5E+dR836CWz*kD+el`~JhCp7%z9moa_B4EUm>O5E5epvPAQKT zITg-E@~hyK`->8Da)##iT{_;{B~*s?&e|9(^NnEsYfK;GUv+4IXnZMY_LG~CH;0N~ zVzL_v1JaWWI-^02UnBIE7z!V54~69gmcyIfPHh&{>NP>@N;48R1l`*G&WBMdMM9={ z^$P2qi!~Zk4wzzZh^**59er-l3c7Rqn4jBKv~&Tod{g+4{-bL5?4@^!ws}{cX+XH% zQHa%uxqdOlj~EgN*<38FO8HtlAqy45N6@K?ir#FgrgdnrjmKn*8wKiQ;hK+biK zD1ti*kwaSHcLGc)o2H@f!*TMby}r$}8@ZL0AtP4kodNz`UWAvhvdANzD2NE_r~B)7 z%OwD+_u$7mMX_F4acK|f8ju$^T{nDlbp5bM2pjaWYp6(NxgFI_$^fG213 zGrvCk!c}8sY{>sKFEbG}nU4~`yaOau%EZF2(#2SruhVp4dKA8QK2U;pS13P#|J}gE zA{J08NrBW??Hw;&fOEe}xof()N!4z7r`GFvJbgp?Ui0(#EFoXB)Tfau_j4o9SnczU z6C%j2#8t0~hPkYanJ)wpHuRxIXS);Fipu%dblUH=e9HH?L!lAOQD4uxM_#`keAGn5l(i`2%|6IB!g@VCs-7;uJGP^qWGl zO6dz*Vqv*=8V$V9V@B24h&J&&?g%d6%NdUfaSOtp4Z{#gBm3@_bDm#jf&65{U+KaY z{2j;rik;SwAVHaNs|dW$N{8or)*1<+?W3!93 zBH*Xyf6wLMbvtQ|_0C^D6kAdalE`;inVq%BYkHC56GbQQml7o${Zu|zpZ0oh7icYp45Mrp*AFH0eY*5skeZ}QOqiI^o zG>5DyoWNObEr_T z+TsM8IEs@6b|eGfu2Ux>cbtSK#}BL9E7sHRgFBI=u>XEs>m#qZNzr)bBRc-Td^8I1 z<%fM(<$dy=cxL0f>0x`=wPZ6Ll5yl-5;C@;4*(qQs>jOHEPsfH)Y8l=TNklBPEroL zmC(FTmm7fmxzxb?(OD+L6e!sN(*^XEYulj1F^>u#-=1oQSG&Pi$|k#H#~Un{9H>Sx z4y&$zPH#||+HxBFK1GVd+RyoUmwqMfTfm3jY)0Fqj>-kiQwITne)Q#wFu#KCg5G3d zG(k2U^$HT7$?2=*`uK;`2Kxbg!p^}+Bd?N@&P9DynAwG*R%j2^H@vBV&oy6G{d{<; zsh*mY)kj&i-|?UfFA4X{;Cy~_ZG(doN-hZwU@pF}?p7#IEn77EZX)LUQLD!3P+|Uw zmHe{^8B+;az8V^HnqWg4$7P%rL_i;QgSb8n&5_iQaeev{M8GtC1 z;xsSGRAgQ~?SyHR+5tteRUX#QJnQu~Ys4affa64wA0wjx2_T9VN0F2~8@8ZPZ|T|3TL(%j@RkbIl@INHng?)q{x%Y1|e}<+OT|jOG4CqIUPhJE06U zDWOL=mnJB#!S|g3zs1Us8?qnWM0Hzy#j!0!hWJ6tEYvV9A{!9s-Rj=oxGa09PO4ZP 
zo?9=5E-zF%-5wBQ)Odj$Q~qS(cxXJBalKw}aIj=oP+!GTlT-7O+^z{{bdBa7ExFwF z=H&LyxW0sa`%gvCv=-^Zx_&>M3ia-f-N2N>9&+9hm|+w;XCO$Q%0J70=o?C7FJVS~ zC{Su&1igJ_zA-}OgY;M?iHGNzi| zOM7{fI(hoi6&Qjl15&yTXJ`b)fIq3I(Utf-+~8^MPkPynnzMy5s{C;3X-g1ZjnI$h*m-uATyQU`mouLim8SPm`lfKT)aqQMJIB%}w z1f!R~y=o@fCG&hb9)0&GAnxg4?WExQP*B=Ywdehkw=`g)l{2@-vs zIq;*$KD^r(Pq*aUf0JFWz~x{?L`C6!*2}%cx7_-qlkNLL^)?7ttdl}Q zS7}f-n0+FA(_aJWn!UX*4LYY*?vI@+w zo>xkRlEQpm&g>Z}HG5oEQyN+=C;G}AE0mzOK}_wBTCSHpy)-ICMjo`fJIu-ri}fct zu&c5O9<9rAw#UZMhw}9nVl}>LWJt`|+%}Lt?{<`-#jjRh-u5B$&Wr25H-y@g?^-k; z^L#NMe@8z;0BU3wDV&bVcl#5WPfnuC6Lz!Z8eLkaL4HY7ZE4&tl&UDu5|#QK ze3c+hXN@DLnNm%%Htcf~A!6Bsgap9e z6?PQG0O-%pxLlDd`3$-J4_*&-1WId0s_z_UPNh zhC0;>HPw4!<;*io3C!@L?!dIdWu2u_QH#7VGui5KX^QhQsilE_Kt=C*aD{uk>KF44R$9dOX659X zknh$8&m(*dp!!*^cT;>y#LE`ra{exu$K)60{xEX>M8xb(-FB}v^7L|aSz!Tmh3XYj zEs#kC)aOSpOq3}+&!5z_V=y3YMhm>%s{~rtdj??qY(0VmbyXfC2W((}vG4Zvu7Sw< z;K#G6@$ea7koI}bkBZFmCetev^QEe0Lapd^;-SHO4c8|LPDMn*#&r!Typu^!#lo_! zt9)Hw0TSH*ZlM@&*w9tO-ilQ8IF=M4VWlCoZsjh%Pvcq8F#38qCc|;A6|EA=C-%YV z{KlB)1Gw&Z%OAmf%H~ztJXP1xJCJE$^q-FN6C@tH@~dqvG%HizsNYW*caRx71p z!<0x?S&UQ|;v^AN#$tX_1l+E1?10K22T%>GjA3^_d$nkrMeplCq}$SF8;a$PTL4Gz z>BHDtCC`sT@l;UcZC?Q-f(qO`Z*vp@ovrc=J}d_9`kcaS-yt@9Z6BY<^AI2{>BdEc z*^vM$R!?wx@rDHY>m1I?F!}v(w9jtJ;L_B1)(F>cO$v^9P9QpLmI^=q4#Y0D|>9U4)4gXyZ-0`qM*LwxJ>4AowiM3c`f%P@qhUkLKxF^D7ZLoKIhe00sbmY|V^$!l z_NeLamejQ}f+yEiQFKXTw-sS(yK2QI%O1M8AIb$Em(|QYyJ>Fh4oorEaSAWV5G^~U z@}_|7v#i%f&9<9VvhnvGJc}@$CAvLdY|XaZpTMZq)<9Tp^bMSVx31ji3sE0qc3z!!u=8 zp8a(mm;DI<1}%-;{jv+i)8OYrllkHVJX7s-4GNHm|0g^D=L-pPaGD?@TI1+oOc(5| z2F;>Q$MRt!j1I5l%quZz@r^&Kd&SR!bPXolcfKnzoq%ACNGD zwAe>PBbC|MRibizaFYah>i zB460_uZDfjyzf$*9o9*KZqIQSyW@0OVYPN{;~P6PjvuHW$Tx3V^O=mrZso(By>cMS z_l{Wy3K_G27HH1`5q{+f`fP56KIa=08)dokHRdiu*)K#6K$^>=qER1nbnYUiiEIwd zz|8gictb=9?~~Q%#eA;ud&N~lPhbA3G7PYP6C8k76i4e%CNBS7xM5B<^1c$DAeLpq zn=_ua#rGIB7(i%eDs^>rUAWtDQocIz~7z0-fnz7^_WR2!3<^ z|IFKcbOP`8!397c9}_#zH>rwrTW0%QyNWhm1w2n^jcDyQ&)skWtP` zSrY)NRNDf#;H>9l;73<{ZdpstN;YU)X!wh~Poz~HbwHkxqyFA(XT`})ab 
z5hnHbUHi{#Sh-i->H%c^ul%Jx%0E18#UokXU36If%oVU)eSYedyWl#rj24aH70U0u z=`WDQl`8c0ZCN(% z`yc$$@7v=n4X_{;D7k@KA|Hd_{*Ceb{T;9TJBdFk6F_Y88z%j8<^SUaAyB2`{pTNV}C*(RDXcYSN7;9bF3xTB%Ihgo%9VbV&7d zkaWOVB4~t}^nWb~U|9&VP&wS5=r%i?vjm3>!05pZWb(v1KXL!O#^a#T28y{K*;PN4 z>_QTIdyKtd_-Eq&<3%SOxcOLC;bKEi<_!uCjwF55_GtXDB>{S`EHFPykqW=_kp3QU z;1j&X)`q67orN`=`!E_2XGas5g>_UFPlA9Er80>5J<$K{=!1Q~cClT8j^i;~R^G%I z#MGcLuNH7-8Ugj+mk4nT!25`!Q?fC3rfM|C*^v_%tV0Z{*z(-1vipxQ{A3IVLdHxXoxP5fH9yA%zv8;fIUhH8CQFT zL65&=?oR!=HL<}?6kv|K?XB?Hc*#(Xzf1bB^Aa-xOK!OyacXN}@it!)&8c+rco794 zC(7<$@u#l3(f@<_{Ovw~&g$||4`xk{mMEh0RF1IJp$$H|Gv@w z-zWSv|NkFGGG@Zw{8QM!0Gf|P$efNF=%ZPz$p9!o7y)l4k4l>@A1Jsy8JMN~ZVd!= z06X6&3bXy!m&JUC1TX;d#sj{#xYvDH*{gp$?kb4pi5N8`8b2REW#9=H!Xf1UDlhH} zMl{xB0sY2sbCb1J!!b%8^+3Pax%N~txr54P-z@QF0R+k<0C}_+7+YpLEaU>%Yq9{u zlL_cIH});?H!KY~27m&H8_MkE{_AKX_J{R;x<;xs+NL#50OXq{GUg{EopVH&H029q zaEipPi)%ecxs5##aHm)|p0N$9%JpB6EGPZ6m(mDVCxwrZCcnk@d^6~Jw5xkh@$XWH zK?lAEd|S>i>#s2ef{>ujk6@=0k9QG!Kfd&+l&B#Xl3arzQbTwG#l*!YJKy&#VYaDc z4@70>m}gmw3yh^m0Ek8j(@nU4_X?OiaME_J3eo>#(uhf5O-AEV7uqhog%H%7ru+C%Uvc--t12RRlS){1U=>FDlg!Ln9-V;Dzou07_vb#5ON+)N`;k zI5|~oDQq=e%{BU9>tE^0Ed;c#Q<&bq?(U1Yn%~_teT)@{Q;yjAEW3R|?%lOJwa;2B z@|NH8s?j8d%gKyZ^Fq;61w=%r)(QjoKQn9?^0UUKK&(zvOqJ2S2z9747wbKiVfcan z`f$3S)^c)=o$L_X+moEgtTRc&+y3RjzNEDQ7~qs*^$Ho&KKxP=Qqs8gyBlchcXd9^ z7yvFLRY+JkvRVpbSg{Xo`A9cPT0eC0Z+`+-yIlLYp6qmW# z0Sr5-m@c0YLG&)j(0g;Nf`|#NMi&CYgfHk$?`qMdz zoz^Ts##@+1>1z(aRacJhm*>ok{z*dm&r$=E{3yAZ;L6(J`|^E`=TJD1u19FO=ER%0Q5~s7k=$3(j|YG&xl_e7Hg0p`e_&&uW%B z0jQ!E==(m&!od}PEDPj3dndp@`U7`P@;1 zjdJ1>12_maAXQU<#>}T1ornQXskR=m+kNZVFEh||wv}=m^QCA1a{QC>wN)a)jx|9b z62csbNZL>1cD8BV>v3GeBKFKIzj6i_ze8C#M8tPPvt^6Ky;X~1X=FD0*K(%~U%wmE zB8LqLy|J3SrvU^4FJ(mOt7+D)7wZZ7`P33|*^A|w+ON{2h7N(OVK2KFXhRr++vQ~c zswsoh68HS_;=J-b;&}eDI{{DY7O{7kR-GEmko0G+Ge9Ww%yfIOd+r$`B-D@csuRdb z-x)eq&MxZ8@?LxXt5pff2$rEs(0Gf;`lnZaW(-CH!e|$B9xPxm<*4ItHp05MN=}Bq**9YGiz|&JWznk7o56 
zt6;OL)ijwJ{x>fGqw~vz-L4q!{JE}trPe^_@xRb;_-5+g48UlW7aSuNR@xr+8&a-+B;^jRW&j^o|_7F?Y50xlq=d605&8o34^1Mkde30XF_(Fn@hV@|LzLq(ruMz zZx72c8}Y6I>?z4!ZGQU7$S2bNh7|tZ?mgx=Ec`z~N8!9ymTTe)yF_gvr4yqMK!YnSaDdVqZ|kr2&yXvMcBeGV}N z=$N1-HFNCMS{D11M&qXmfCp#N(N?o_h%Z+;=PMSUpP*&0OT%ir&9NF>FpAZRM1WQT zxquWVOo!EOXaj1sK0`fdLOAb}JW%RYk-1@Vl>!+ZF4QF#jRgctf3uAL^kFf7g^^^V zwIEvHIff9WP%VqoBYcJEZAvNW%96+BwvTiu&@=q+Sf(?O$wKd8l(_Vapfj31Axw7<*5rXyj9j1vQ3B8xktVNkiM_q`k-tqr^^lx zYn}H$7#T_hCwAlT!0Wfa8BQv5*ee&mW&Gsc-K`8<2+Z!4woDM2^Tzs zfO|9?<_J?n1e~9>w^E?<&fi)94qy*(lRf8XR0!+R`gF5{w#KG?xqEb&KUGhoH|ec> zm|cU|7bRwkbmKqy-N#l1$32Bs89zb6W8t}Qc8`N{{C#Llhf-!``7I33*1)a~taW9t z0eQnT>h9S+MIw0fo4c(!`hd?E&oj6jwzg^T_E-dGzC_93RYhQ^|JZ(Cz(q)2T_u%4 z{=@bY|A497M)ynj)`cnM%|fKzn!)WNmf?JxEqS$Z-PE!iv8YXuQZqfCP$a-@u)($G zqDk&Z=?!G?DHKJRvc5KWyGF#nF8kuadKnf+UmuCfC6g8Y?V!IdSbq9)%c{Y&>Ib&(`!ftXkM#+Jqo34k0r`!o)89+#~#g0EofE*`^8Sf#aYXPR_%k^}_Cdyrkdk5BTgG}<%r zO!dAz%;*%#F>Yt1X5Dge65r|y0q=QUf=62?I@%&`x z=2$~~Z$@qdm8bN&P10+4w|_LfV154mLm=Et(_EMyZv&82=aTtpMUthM&j>S}{p|kvBVRsHvNNQ=_Ms*}SBBGk?=aOY7ODeGs z5cM)Ouj}1qzMH=dMssy_YDGga%p{!nXC;Bj0A9qaRbCl_KfMS6*hp-y8MP~`iAJEU zcSY#gESuwo^FrgkKr~jTZ+z)`lX-T*x_xN^k7Jf_U&xHUdNh#GydT z__u$8+5KNpIsjq`3)L>^mQY9Z-o0mC-3L@NESB9tg9Y3tl7=gSNk) zC;;eXmyNFzNd=E3v*kZ2*Z1_1?k@Z6Y7h`zz1g44ueCf+=HH3M^SF^(oVGuSEo8#j zLm?vhEncx=1ITsP5HypqjQLDP#eakri3MR&#v2i1B#qPln#Q#X$Rlq+HR&;EAL|5= zq2UfSJ}IYaL2d_nE*KT_J8r0)OSgI8ZGPzy|E3tPHq|Ps?YW+>UL-xc(v53TR+L#7 z2Am9PJoUBJTJS~uvlXi%PxpQz!Xcm0<(80&Z7!g)0q>9XUOCorvps%Bns(H;6eDn_s#|+p6hv`&tkDvbCHe;j01&);`Y(e09L;wZ01bjX-3^iM z^&Wvss|jzdmBO9@Y@aixQ>L~@ZkHnv-1hg_&o{?Bf~@AxHlYx^y+v{O_qT_(wPEt) z^dpY&I?-7^VGKb)*N6Ap!-=uWCjt&J%`Sce*hc%+9edGtPSm;!wMM>CN$;~R*5hf) zs358nbsrEjzg5l24aqIfPYmJ`RwzyZV zv)XDaTUdnVbDYNetnABK&psW#U1>n0MDZ)763`Mqnz(bRzFapgGf(zCX)3hrgcg<& z&1`9x1^izb`XSr->P$=~O)J{nJ2v;+@>ug->+IW&$JP*0r(Mpi$WK;Gw8dbF(r@dNkQ^r zo8i~C&%D*Cg7MpE(&O^!y6VB=! 
zzGyBPSf>N{2<1sigsuIEl?y{dvnBPs{ps1}v>;@&=WC8to{?Bq;OvlqVkjhSZXsMm zJT>P|hqvFYvD;UE;#KTmY`YVKPQ2Pr^s)Vn&7aWRX3F&8qRjylKunm^wSO@T^i*;- z@s~vc8fHw!7l4xWBja*YhPKRW)yi<|a}yXn`_v(gJ3Pm{56>+V%e3C9D|cc1VI6^h zoHz`*Q*D6!16kEy`baM%Zw)$I7^!*|2LUkdW_47F5T-W&-Ink#mUfAi7!EwF&+4(| z*G)V_!=C0*rRzH50Xqx~#K%!>=B6BpSdE_}Sl->;A7IX{M_%T6kT|KrnW6+d0nPLk zuZoHH$SBAPg6|iIcy7SM;b^~ogzAj~{uT997yRQbEiZSnb~o<^DU-M029{5ksTyi( zYLwe~cm~^jE-7;?Efv@hUe0t&d?}GFyktD}wWqc(#|1D8gQ0oHi&SfxTLHf@rmUFv z(rKYWD?j&<jAVn*}NH@sG#a3bA0{ zY@1`IhBPWM;LiAag}x%(>A+{57~VxmQ>YPRzk95S&R6&XZpR>%HuVDGw$U55G-0lk z#Fy>&PDc;>EV46TNKz_^x`1eBRUPRnYx02i3EP9vZHP&;`s>B{8Oepg;*4Gh53bjziSEas-i>Y`Yle5JNx?nM-3UdE? zDrB`3Pp+F`#S1|^^9T|R>g(f4hnQ;Hbmm#~Y{emm zdJ~3;27D+0APIs74%SZ zllRI*QxpLN!c3NsjG^FPxl2Xt$Zi!9b=(w9UCWI7{4qLz3{Zq1`1aP7fU>Ff4-w5p z9=DUm;8;weUSlsnGnoKr;8l+ZH1BCUNJs>OAQuD&9_LvfD^BsfT9xN|=NqyN3w;Q% zZkB`JK99JitRV3u3Fx0tx4qjP%S_H2O7&4PjSVPrMWI0uV!)T%l+Wa5z+%!x<-`qP zVgOBrGj`oR1CH)CKuZAHXmJmg4#}kac=Ew5D4{Pu7LO?>msi8uHt6Yaz=f>907buD~Ju>>bK+Tgl-uN>`u zTXGtSQMa8woos0+n21?WXIL6T`T#=p#nE z`+B6WW5}tY_ZbV~KA*sMG^6&4QT`2?xr7K#d1g!A%_<@sJ^v1FXWCT9KZg={_JtLs z36gh)N5=v6Cagirizx3KY@^4DU=YbD4#!g^KZ~doOr8raY&vHt>zEvFB$%6Q8vMli zc^z(pBeyGFuP?-mrMc>ybsf-`$z;toUb{pEOzR(y##`8>HudORexCY2cgO#JCdNd| z*`UMJ$5;FO_ZIpeZz6UESMd%Wi!zVW__MHWYvV>J@A+9(drYFT+Z0A_B$V*MBAtR6?IF z*A@R>tbhNKvor5DJo$_ zJt+n->zN7lskgQs*|7cbErQOd*xW~~K$s_|AiA`gTLrW`)XBa)M(P;)X-xH1GS5?~ z$!I?3B5XL!d_;@bPcF469kf$2E^(s6wei7e{v9-UV}&pNO9;JQ^Te@5T`>k}83^bK zRq*D)q;~8!Y=9eR=aRZK)QIKU|Mb{`&+9C5Vy&QEtXky2q{5ZM{XyocA001MCYwut z#&(V#V<;(UXB5|6o%?%A%E>suag37}_bb5=dpTUz1=0mrn{BG94wo3*Iv-rpGiWK9~xIfR* z4Op<&*f(lpS$yW1)lRc|TrT&e_`L4qdNl2fdd>7TyU%4@Z1zg}N(C~8%IO*F!n{Wz!q9r78f4aH-htyTogKEn6sTl%UZ2S-+Pjs)w7F`?=Jbp|~SL6>p#- zq#R8E5P73v#1jZtTTXLIH5(+AioEJmo81BvEm?Pl>keo0rShxeIOG{CCXGF1BEMC* ztDZ56^s>bZrn#T}F!Vt(JhEeqRrl_5_G|YzEMR}F)9Os4M%%7du0;inNE1=+-IQ+> z%wm@epve`1G)d@`6452TQc>+~BEo9ftLpObj!8(nSH*_KY~aq$CPpLKVinbmqH zGAg`6LXBx}a$Y?^tRKxNv~yy+#1zoiQ2L@n!_=n^Xb|Ra3YSfvA1@g3ekw*M>k~0M 
ziXMX#b3KX}Yi7201NVQ?auA62`o|3O!?2a=Pab}!Fdc=oM&VjH&^) zl4HNd<}Z5;^RW|$9pS@bz3zvtJ;%W;h6r>fJ&iN7+3Zf($NJ-z?UB@)IA-J5rlPUR zvheWB#YX|v{oS?kfK1yU0G)Kc!=O!w`Y{Nwy|GMt`pZ6SY+3Rl6f1Oj$1&O4Nv5%d zsrrD_wCX(Dax%q$sbsQDG{t%}Qh~d4a^1hc(Ci?@6oCDzHabLEw1~;CHDkY&J1m_+ z9cwb4B|4PMu)X&(xRp#UiCUzU-&)T(U+2|fvsfh^M588ir8Wv_sw`oU2Az$ z%eTk1ySjjA+Q2@~V*P2{q?Mt?v!nKB!ubJ zK~=79C~2=To+Z=`PPa-+Y+{^OS%k+LI&HAR#xu}0 z^(M7bp2eB*ju}cE;#z6o#2iWq?L!7?It2v*S&GJrQa*mk3dTikFP|3F6{zH!icUoWedIvdH(4Jfb4>|?jBt&Y-mK3t^BPDb8;X8iw@s#wL03? za$1PYbU5K`aUj;W;saO=zmL{vXM7@#wF86Ij052!-*{I$RfXUwhYYk%VY@V92~8J> zUs!-aGxwV^%KN@x&x(bPiW<|=(sB>}!9><{7p5FkcSl#ynWFU@-tvA3rwR-Pr5|Z& z$81T9cwlDO^07qiv#h2POrlc}kcBk;oa0*|j`ZR;rJl6cz|1@Iy)92v*{tj6Gjp)L zE%GwRPV@8tlos2te@%EReec_v@g0z8nI^)2{$kh}cw`c;~@5uWN?fG1E#;gD$X+EOE zty{3Z%Z$aLRFFMd;ZT66G#}F7c!z9UKf9ZGuH5cd(1j`?;n$yb?cVr$zzpH*S>-JLq)>Xy(DW`EG#5?dMFdllNg?d#<1I2oxX+(^)2;rGk@-3QD3*ul#}{pTqp7BeEGi2|l6RXT3|iKa}h ztvZBSzj^uA@<-XWcV|AMy@b*26BX*}G`htpd|Dx2+}-sVY#eWvpel@26Hp7C2B0Br z;ZM$-|W+6c^DB5 zmCK9yP^eevUUDzBN5(Gnz%|A!~g{N?^;$JZ22GE zYuVd#qnvhL@)BV{SumnPZR$^-13_}0Cq-1HRW1vWO$Yg7Y9*dG%#}}pWna&IT&|c| zlk&1tld*Tm{YK7op?43n;9TNYg>AKmPW|NK5z^~ZkFGF{6>r5CqybgP<&CNuKqK?+ zJwxL<&1U~g`G6v|3|X8(zb^>v;c=K~sl05kBUbAyuSfVcUAGOvB+`kes~&A=4jO8A zjf8B;*PL7`L4gKLS?anNgCNnLN-k@H>2+B$#DORThWf0;fQw%0wm1suVs+Q}v zWDS2SlKZAtRXIt+|NH6-ea(>Yz4y@jcPkmfXMyL#6@I9kfv%`i&`3fO>LP7-OohIY zo9jpc!EpVHgh!cUw~^^*18hJc@nWO`A7wLyR*f%^Jze0==!kO^#{TDM6Qz&Ig5?E_ zD?Gp0exjl#v_ak59%!F^i2S#`>DLC3vprYJ4m1TTdU*NB7eT0~GcaPbs?;S9fize? za8D4~-NBFAET1Mi#Qd)F)I8tshq6TWVQQ>(!KuA*{iPhVaTc6^HIzfIJw!_faTXw$ z%DPcgaa;&O$YY8EUC~MJLy0vCM3TY`{FIMm_3cP43XCnwuHa7<*39-t;>2N|g?b_< zEh<;b67eKUO;ktxLOn_e-B;}4$1$d9`{C!HJMmR6zeY^o>$FF{>={o=|GX5? 
zo}w}h9h#(WGvI>j!$>HI!KuNruts|DXykXzr*Ri@3_V2DZ1Jt}GW&zlO|Llt)f`)Bt#lm z^nU>mL>h2@ULIH~*}X=uIicf#it*vYZ14Oe>rwVAhTnt5Mr+73h;rX$+=(h5Yf1E* zS3yK>r*8x46Pscp{-1)AmLWk-`Loh=RaaCx4z%5(?a$7vLTHot7m} zP01&-2xTMRAld7#dYgXC!VROIvQJU^bS&Vm@j{F+r3L-COKVCDiD?$?a%`b#X>b6A z&h1Z4=;eC)wNU>cvin(7l;`*k3hi~Y9KnJ_peK9TWKLr*1Pitw^Gyg$nb$~IeBsy( z(#4$pWO115r>LS#WcnvW3UX{8&`+$3n{Yx>kr z@V9yu13ZWOu9OeKelI0u=?+KPeo4}oW(XrBIM;U^&C%;5s54eK(B&SNSi=<^^3zhZ zyl-X{F|ANw(a|dRK6VyY>uS$-BhC%*T`lAwAty{*b7&#uaCZ-MeEXKIoE_ASN;E0P zE3>JGsM750sX%pndu4s^kD*5EVx@lR;aO=+nlQ5icJ9~K(xl1E@Z^?Q&nBuLh=H8& z>dO_z_zl0Gja~O&+E;Bh`oAjIK#dP+#Rb)kLtTJI!F`snZJJl+cw!fT|Bz4;Xa1ylZ zkAh&A?hSJ5#_9Lv?k{DbtwX{CPxFbr9{3*O0?@xL)J`u`Mv^;r7@0ejvfBj*~;cpa_qrV zgJ~!(4!E1%(+mk!r(=F@CTrH?T1;;Z6p`R7fdJtf%_`P*hSEp@QhJ=n<_8Fmw z8=P*`QvVzw8Bdv*G&Dgp$@PUTA5y&Y8keNeDJAXi3=Z*Ym^J9lo_G0J;XxJUo`y)D z6u!>&$%qF@cIfU0vz?s=?<2>sAAt4qzBzy)dRZfaM>6L;O{MbEl42qc0WZ}%KDd|R z&Dk7Cjr9YxsowKKzEsF_Nbd(rX!!7zC%m`7D|lQNcQAnp=YTnwIy`sC$J#LPxHRb) zK%TPI!!k21AhUTPJf0d;@+86a;9J-xp@VvKfFz|;E1w?x(S8!`T1of@1(q&~YRWl3 z^sB#;YxizSp@=9C-{l);A~grse5|pa9*6`MWZsxf53*VeH~qoAXdPEIfTkN+hsrxrQ1jzC?4-b|Dx5o6BY$PRf6nP5hPDEXu@*VK$Pd@T zLjrmVZj#F!Xs!(MC!T&f1oKC&x8$1NhM+{c z!qy4&G~l6o#)uQz9lB%-HJQ-!1X_xGyDi3ne_QX_%+QH07+Ka>4~3`XgrH^vw`@K^6qKk zqwmE*guImcq8oiwFs*jNICI{QUTm9w%dL>p4_E5Ox=&$H^iDd*;1~0{|UL0welw4u}al?jJdeGs=W=L~68|y9yC|m5aWR zO$&f1?j61#-UkwO#bPKE5~62Z^gs~e7YJt%;-esijAg~qsk_o0T^c5S)#&@YnkImA;3jsCR(7B6}F1MGRB9F(u_(H&>- zXf3P8&eP3npFO!?5)}|!;G+iw77zO`_ zy|?g+vfKYa6+xt>MN-KD0V(M&>71dYOS-$H1nC;OJBIE?5P?BKI;Ds1ZtmkbzjMxe z;{F47-L+h-?FfUjpPk=Ntp%c>9iJ0i1_X7ZIDRW!3U+dQFK&7k*s%|?pSLSy@}dZb z-znd4kio=%nb8mNOg0&QWMV<}s+{fnI$55Y`;pbZn{+Y$u$54Y%?I;}({8qljh};} zrv@X@FEb!mb61wtY5taicgF#n$mvs8-YQSq=sH2nWsNezojx-AMe;<)yid+{HYiK+~BkgNs?_eET8zOX_|0L~Il- zg}%2xQv{_21=8an<@*|T!R&qRq$FPmGOY8aWf>{`|4N@aLcU|?=Uzv)c#v3 z#lP#B$&aP$<;@g6566QL>=WgHI&q=P@e_i|yF9q;;C@mclRq;O&XwGQE_3VoF1SpV zyq2QHMg~UZ%bC`=4G_Fs-o8ZgPfX%=V-b|Jy~y;5s_!D82h_ou0yLu4As}>IG>XDV 
zRn$a}4rC@{UCdf^2iZUitV}%c-sunSUw33e5L#qzfvP;;&A{&m;g#MOd;BKkHB3b1 z{Z)@%kOL*j3*Mq&Oo1`!vyK$ol@wM8N$@l?T_h69)Wg;Ws%K{IMP(UumFj(9h~8?E zwDk2=2W3yCO*D#x(b6=1Abx#_r$nVE22$BsQcLJ zI}BwKRJ-w}rknb6r)e-rXtGF~nzuqKr_BAeRjTAuxi-H;|3ReyLK1QRkv!?aVkP0R zf}iQUa=v$GNiP_AFO7hlaT-C-Q*~jYd1hpyhZAn#KE1j|@~6J{9D#he+8^rik?lrt zF8p2JvSOdYd{(@8otdGeSPY9pxfd;V>dU#!tiowenIAo0nBn2l6bEvA$@lXB~FXV zdFb+J$7j;4H4>;|V#~|jCrOCTbbH!PCH%lH!|SzJQ9C~7I`IN4>r2)9ucubVl2))s zYDJ19guOREdo*LIaA(O?{Mw(JNqR4s)h9|t-gJB$SeewARbB;-KHp_Ie3H4+)UMVS z9QoFnSQ5M@gs?y7?w*2)^^XdpbL_71#-AjrC^Ja)HeoQyUE&3HW1y>_a%)Pga%f%A z1K!iRd#%5cUJq#qW~}_I|GBnm`f7-+^YL>bm+gq86^x^5nO$3|w}_GcNK)L+AP$RW z7y9Q11p0Z>(JEyLyN-Fk%5}IwVEAw3S!U5t{zc!L@|OY?JGDFGC9B}?QRMuvbd7So za^JOi`?O9g^0vXpn4fGYKJI|Sk*+Y_N0ITit&oWR4KaIug6vqb1(JH&exE+4{u1AA zQOtrcwo4X?DMs{AK6aLo;mYou6C6}C zmix#(nGIrl|3*|^Z@s*gcX*1_ezkLGk<>eWt)FF9D!!&W)A-FQaQ0Sgf;g^h=G+E=O1F#dTOYUSKI65Y8d)Qf>p$al zF0w=j`eMuTQhYyqR~bl4jJ5tCV2TiydYd?#(VgaQJv^=FJ-tee?o|=D&FWKpH)H?K zI9!dywKsdrp{PlDe`3B_cX8?k>QOp^(`CE_ro4KLa5-c1vk5`Maq&lHPefHE1_lL( zoBT!Z%A3Pp6bwimDFzyi@`XVwYI;!{YFwvG;8ri-Vo1hsmfO~F>0>}cpl`cApFC@% z%}Fp+)xe@xR|S6Kt)MDK{k)Puf3DH)Le=x}-02oaTY_rNva|5cs*+SOD^F5FM&2FaJIO+JpUQEw#Z$h)=R4g2r<(rdLt~d?QoBdWLEzbhS@D1$0K~G)3sXu{` zr4FCGWAvT2!LEgL1)ihA2il92(CGVC-MzvsEZuurUFH;77x{{*eNXk&A*?au7OKIV z?b2DW`y7F~vlz#G3`sIF2>+L(oyYVKBcLX1_q=xJ8O9RLoCpmS_eXEb&OFFZ@o`gJ z=@fUh*>Gp(nET`%lDi3F5vUXk8Pl_KD?5hM34!Ihu_#&mnb>`?Pse^KmU16IXV5lo zC1BhvrG9gttr?1r5fy28L6JVq4;qUq0e2xmH;eVNO&5PUA9_8LV|qV#lh$0`ZLv*A zn8-}WTEqyemHtKNTMKL zDG34~Z`r~)WfT63Bz&qm&~jy_Y^mUtviHjY+DG-$K1G*!dBBZIk^UjAW2jZ_9V^-FwuMND_v*L|2`5pOgq(tl*|>r7Va zFaf5i&)AYSg|UR%caP8K0t+n03W0Dm8!yhDzXLdKv5yke%xI}E$L!;vWs?JGb~y2i z59#~s5)&Zim54m>Bex6n7}MjV=NP4>`r#pw(F?->7Q477U=|> z=d0&cDrU^eOzSg*Y;-?U>q*NA%h5wXpdZ!63q+tbRhZ3mox(eth5bZ9viRtmEw{!d z16MH{`rDA?kT%*uh$kVh`Tpxlp5ai?*hbS8ud1%zN2egRb*K+~&K=ER{(%u*~kMEu+eWPU-k(o#+ zTnSXTGH-oQa7q>*ctrJleL@Z9t1@7~=KcuhoG;|2xI^B_9EuiMjmnMjO zxR}+Vl$B`wX2`!jYd94#bMAa;d#!k;^DLn{R;A)K4ugm=1?axdPHlUw>L5P 
zna;rBN&(Y*3JbQfd9m2NI!4+A_;*i1xPGC;$n*x^<7CgXc3g^?0lrn5=3XguZGI2F8N;0q_g1+H-+7@wvV`^f?B*I2 z`tqV`q4B*i3Da&?b>bh(o6Lnv!EnjKc9cII7nCTDQ83{NwlOpyM| zqt!oTv#Gs{`s--(6k=Ar@vBb=0Crw0tLzxPTQ9Ylz%HZGq59R);B{eJcK@+;3mcJHKp?#t^_Ur;=nDn$*iUW_7^cj{ppd@1nZovo{n+vhkBlUc2*>>aH8b8@e>v;)5p z96EdVh#ej`(Qoh<0?OV3maqbbu0U3p#yO<`ENv7fbN49?xv1|sgQPI&U53Fnafh%& zd(kadpQpY56BA8z324x4=W9kv9LKY9bdMO&n3+|`vU}*~56T^UxHgaL^$PZy|m%z{Iz`p;h6d&3euEhLja0Tb=!C4K3~Z>|(`q zwlQs^r$&bO$2;8?&dKed_`;zC<37dhvE^L~SqlatZ<%5r6t&5>dxSWpi={VD;9MWL z8HfzHu2xz7HeWj^{Me3u#tqX@>k}{G6fH-#NN_!L3i0}O+h8+IbLAYo7Wat+zk(-VK?o;u04Su0< z?F}sK&2Y0brPB<3dijxy$9M+yMy)Tb-Y`5r(hWJ0_p2_d4H;qmiC(SDu6S#ln(DB2 zJQv~pJ%Zf)>WsaUdUperRIwBS`sVX#5k0RHeVFv@L-};i6nXxKl$Eb$d&v|`1*tb% ze=x7bS1uwPgyo(P0B*N%9!I70W;mQNOk2GnKeE)u3KY8m1I1dw%xPYzCHkg zUz^Og+!jeXxx?x_?CEoI2Jk2&Z!DSh8BCYn6x0RQc^!&^^gG(H)BKd{fRv5CqnGN+ z7JU&U<%L`(>;vtZw_Gk^Z6`zgHP%7{lkWui#&}d_VdXE^vA!hJ+L(Wj^TF z-Kc&YFHAx`P4Yj$)L2Z>s1wG&})8T8GIckB!5dq}(7 z2@HPi;h9B6oD-PV2jN^xK6FHT(7DW4s+7k;T0{nfG(pBxcAdObOHgxi4sdKF^odP` z&gPsQHpTjcn~wu#eWih{L7(fzAh+;sX{F$I8hVdS(WK)kvYqLUdNO_o)hm(NlJY16 zk2Mb^&SD*E*}^L0QiAVt3_1pA#^24ydrw(|(il-vROk#SmphaMO8Ybz%(JI2VAS97 z7={Hhs7`|T(;^!xh5>PGUSI?IKAhnHpIuD*E&6FTjdD0hNSdSAYICU}Dmx1DYBU-U*2 z3~hTVFs73&(Tf3f5Pql-&NLMo>nzgA!shhz+j`A^eg)@O$qiDil_JzCP6#@D5|xy6 zxzsE6Hq1!>s;}?T-S~*hX4ZMa1`FJ>gra0w z^V5pnWY0DpWIx4EqI`VQobx#vMEjdM+Sx_DiAss#h{B_;OoVM!&iBS9Aa4|7D~s|l zz(aZ%lSnRoW94mnq9i7KpU>+bx9Z z1p*19x}sD_F-)4^DT;9tOY~*?zFpfi$+~|bXT=XM94#%*X^BLZW=1%?epHc zAfP_6dHH;*btBKS#3`P*Bkc}kBF`saFL*JIKgr3bMiI3dvc<%DI)zRjFT~e)*vIaC z$aLJf@H;=s^P@jb4Z7@1;Lb5T{O+trsu&S6JG5(Aa-D>FJKRFSKpBQEkS=gyqVzSe zLs+yEqyno_G`fD|TFPL)++_9@{Th^@uZ%6aJB5*`=Pn|ue?+h<%;bQ4b|x^q8#3GZ zo3Ng`KvrMJ8Qf!}8B_t$vdnAC6BM_>+u8Jwb@ zR%O(uhR4UOV;{1)Go75m+I0kfpfcJ|&*$heHF_D_J>R%tERxAg^TOf56KTNGoxT)# zwZC?|gIn$S-W`Tdg-+7BXRqM!aW-u0{=HY_4Uvnthlnu9AWd=8!!~+9qs}y~S=gRz zQKo&ug#lcXZ0^{3lXGtFjU#;F%!(NA425=X;TtSUgU$n>00ON1qq`d(5m?BwnYBq> z*?yYu@?#uhk$zL$wNt5TA@V(Dtu_}U{{oJ?$4Co!HM9op^3qtBUv|*!LhGV#Yq 
z;=n=2S?-)&cqn_+$6eIuq;ahAy95V&kH(+*{ge@xGAqmp)wQau&b3bYrXJ${)?tX|0X#KCVq@=t0m^DUfz*Jacx`m(`@@eRtG%z$f?y zaC`Lm^%K{xlHOAmY#RHy#6#aFlF;&l5Sjrkm+^P!`q8+Z&+eN!=ZU>ZptkawU3eZO z;_7PSC^48c>XmPD&FwKQK+%r}Vm1Z=mfo5<*H4^)W1;JhP1xod5c6Pk!coWvQ@E#& z#ROQW3y{LVD8y<$Qkqq^pSRrR7FxgbFzh-}tu<3Xe7s0c{mu(7Ul>ffV@ly@z5dmc z!8a(&RX*}Qd>tMDlfgHg^-@&eC0{ig2;r={lucnT&}cU}0zAca<0BW&)e#y#*X8EKNP-O-G5pc3jcfnjW1H%v1qXvU%>*hR>IWN zSUxo6dry&%`MMqc3J+Q7e-BLcT9y|~zqmQ_m5M4$HjYBil`@2=-9?w`!ahoDT=Pej z=)S;@WhD^uj}Y%?W%ofLtJ0>fASrZu>bTd1bB7xBvpScwa8t6#fR7j6(;mw9xFh(r zk#X_|^(p5_`6#G?OC6?bsf3TyRlLdIjtI#h(m<5gv*f&S<7Qhq808( zu#6)-y4-sNQ>DmiwJby7MxOPlIZs$L8k*^DTH%`}B-~97ma-kNOuK|k<@d=q+uRxP z-9j;K@-H=4m$j#bmF0V))4!-aX7WDjCaLd|!qa7ee|P_Unn~&8qj2{_2+n25Fz3!G*E5X(nd<`f>%Dv#Y4f)-_CfaEL7-lUXL)Zh>mWkQhZv=w8X(7^KIXDrhB z1ie6LTt%(wq-b_BlS*#>>DHXaLEipH%Iy`Jc=|gKU@bEllD$^V^r+GH%wHr7+R#6e^j(W?U}fh$d_}WV9n>ynUc1ih zynVLqY4f`Uteb?FIrA~=BU2))n(%Ghj&TMQ31(V$Q|=5%^cuQzPPx)C>7AL_X|!j6-uN5iCN2TdJn1$@^lkQe-Efu~ius^g1Vj zSe93DW9jkwunfm3j;*}@F@ZqwuJF=vIGFD}6Y6jqP-r5MzYKcqUKT~WohN%3NFw{p z*aHuIC6%CLa;+iZVSJ{>P<;3W-5|SL_2R8f!cv_`AdW?Sg9|H z{1ivm>CO1w(*kEUBzIA;=8eKj?n&5JQp%5OI`x*p;@Sq0sQ$v7t#!(9 zx>lkWLamnSm9|QFw98ne+?rQiVT^wFx3*_LW2vP@rm_QT*4brQO+qa`qZ|`g9Sb?G zg%V=dS56eGh%-&gONHO-nFPRbACQLEEStYE0MN2qyZ!?f~R(v;|oGfdyIeyCE` z{(Yd@)wSk^U`#@K+6QC}8OLeOr8hfjO=?0+Xr8#DJR{Ih{^cQCqDsq*dMzchmQpAh zc%7+Y8#Nrsm8y~ZUTSSHX_yIBvwEgLvHkj7xAL&MYJ$CyL5{Iiy+n1UqpYXiYD%@N zyGiGr2Q50g(cY}R^tY;9MvabuMKVQ(kbFg83&X111Vqf5zCE^!9R<S|^-f2=R}aYO z-b%n({XZ?@-_d)u5j0=$JoNQi>l4O!Rq&I#n9_jo2an#Xa2~gFm=I>)xs*Y9 z5{oWht(8qUA;!?FLYLgEEzmYHbT}bs-?k&{Vc3)!hZ?5dp8#8xd1f2u zJs5OuJ_FI+vu@Br6l=ctsmIQ zXWwg?6$31&{S7u1LP2+C3Tc+oq0iL*{y1`z9_g`jbyP;(&$kqi0SDe*kSa}dHJCQ9 z)TKa_%JtCqXhup2L9JcZ?s@$DHw=i7SkOKsD@@L|Y%_Av8`&O9n6yPe>dQ~7cqip^4ajuHzD{P_a-5R4_ zbk^Rf-5IA{8Q9yxN3*62ZC9h}pidFm0z8Foalp@LshL!FhsBFO06AJ#Mak}@6okNf z_!J`$n|yXKG&au}ygr=8YBd?8G-dsJcJ8aA`gn@R--q%g{U>OHn9Jyau=&5~OrN6r)p6+wY6oqy 
z_UGuMDHFVWpsI~?Q)b=K{lh_Dr#0>wJmr=6=O$Wm!D|ewGjTEood($4(R9jKDK+hC```(4~e5l+pFhXg^bk%NxJ^QArG<* z(C2&L?+HNwX4k*TX6$^*Sf9J}H4ya-9)u(C3j)NaY0NZRB?-M-3MMuIY`8ob*T@Pq z1C8MC2Z@*!lB=@N(#gk| z%3i4gc_Lbkwi#gJbPatFrvZpy4n?-u->&18UAv&B#u*R;=Sd~g1otsGnJ1sixI%V2gk=GB4B zlL7*EM-kA9eT7QbtB!!0=P$V|6Cl_r&ea}s^;Q}lvu36!6x32t>WVXdRS9~yQ?beumTZ~^!^ zW9aYjg`Fnfki5I4E|00xzflspf4{4yqHt}{7`tUv2y@+Q)3CMveeLU%tn;r$<6j$} z2J#AQ#VY5gIdKI;y4OuS5F6P9YAjSpv!xx*6imB49pkas%9$1m7%h+;n6pGjq}g?B z6=-^iXxX%LT-xkWEyCqd0OZAd(FK!UCQd?WVE0TyiM1C?adWh}_L%t{1Sm^xx%*PZ zRt1Dxcx;!@Bg#de{4S(Q<@a+0G;|7Yyfa11_7~>YGs@Sp;aDB&bOCn2>jhJK(jfa@@0iJ zo|N0RpAGT+zB?Erj!waHAdxvFwI%V#BB=WQ#Wnm3pdnxB#VB2zheY`~l}=QQK2^7a zeY;%ra~tQKK^apgX4a;OzSy+LXm#lEln$oyuE~Hfxvu|!s%4pH0Cu%gHvpF8xHU@q zQ%I*__p$EHqnh&+nuLz0AvcGDX8B473&-6UPkyj8(mXGFc6mKi-NzrSknXcX;yOF! zV7RFJY<1W>Wb3ZV`04}^1y0u_`AdiRH%S}J{#d$|XtX90!eFf7Jl_+}&-!vCU?Y_5 zQFspXA*|{qW=+R^5oW-~=WWpC58PJPP()vb&NPN7c<}l6343`)i{p{#?XS}TR^^M~ zkKVup$m_jmk3+k918J3tk%Tn>V?C_1Q%w&F+vji5g5?-C9w8+JH#=^ie?hf9267_L z4SA9~K9?i`&ov(K)0!^#v6;2!lIEtG|LbAx?}Il~1G)GzOd}~RGBO<5KtXft9dxX& z+Ad4TlMSVI_@6TFe<(Kol)br;947lTyjx?e?QUtS+>eCyB3WZPKb;;dE;41}{vFN! 
zx3AB$p8Z;364Kq{nVlpUP-|iaoR@Q~gdE6hDl}@BG%DSzR~+wec?Y0ON$OxGBcXH6 z3$F#7bPe(U|hUe{Ud9aq~>Q90E>c)hD`PBogP^{*@EBB>vx@5cr(- z^j#%U`vOhU{}8PI`#aJ+mwu*vL@3QdllFIl&VT$f@Yf~9PhqTX6!X-7Iqm*)_5+Pj z>Ob8`O&s_0udn{KZBH-Ko5YW^AN!ZC{QH*t_pje@0qK2jEXIJ@Utj%eyXlX-Jp=XP zuP4sGwpX43;`NpOU=xnNzWUepJfMF^3NN4}u3nEDS z$at*>-=o_f_HXHvDn4jrkvgufV>SEetN|ywaLPz?ra$6*s>J7qT>G=Ddxl)9JE>_r z9M;Cd0z*MPW2viV;qvmALmB*#7gy9mejW4GLC?$nqtl;jAsxvc1*4{qZLkgoV@$ms zjNJIAWazIQ0<^z5A{|K`0iH-$yq?NFQ10L_sa#HzkALlpVNL^x}YCeKPTu5 zo@~pw40M!9kMnL}7YgJ08*k^o9;n1)f1b*_X7{Vqf>YT{fiL+oP<1?-Y!?lJQ*yKa z_^|)zyYC^Ld#<*n@$?@m^+nXY&Tjw zGmQM})BabRiVy9ZRxdFbk$#7#Zf&3vEBgQbj{|NEub-O}>L(@Gv4&b9zVis?armBT zHYEP*Ch)H}{e%qg5`k|fu9zQxhDRS&tAKfk$3gPH-}##ouxJo$f5%E%VY0vjhN=0K zS=pbVvb6u4$8E&a!P`yNyyUOL@dC-;%8)NyWXJL$GDS$?SVYN~Pm*fLGWfprB&(xK z2Cq-+HwFLO|NZM?K2ah$=rJX{HAE8rx$)}%*5^=dB=0B0^#UoR|N5zKl1KybbZDho zw~-_SEREkBCRYZ_q5or{{QF!Eb^9~V-ICI={z(M?>m;Z0zZ3;@^_LDAGqQi#6#lYk z{QqwNESCSD=tR7}R-%w!OLs*q@WX*LBt8ze^@L-iG{?sUk;7l~?mP-Uw0O$Z96h>7I?*M{`G9vEZjpEe4t8clvD<~`0Q;Ee$l zJR{V{t)ghfgN#H__4%311H#%56JdsCuY_&HKpqahJvDIGVmk zKtJQM$t?kwBX+laF|9Ut;2!QM0WeV+Nk)s3fZ#Pb;0&~Of1~pr2!=9hP<`{?wlGvC zz^;Vyg~g)gTinm+8alb_HPQ9{wIgU(E;D@Adf)Z;ExQMY>XAzlfA@SdhRNnl50KVZ zX3O>U-mu`2PKQx}_uWsNW93%Ve7^^wI8F<^M&}DSU^8*E21HQAF{r!-flDRoz+OF1 zlc3hKbr!UK_iZB(Y>xk$UVcEQ9832tYX+~(to2k(kVU->1C|dx+QXmRs{jS4)2YnY z|MrRo;M_T$&-2c-d6cTVv}+?P8d~tc5)afV`saE5Bz5X< zPJOgDeDr&++1(FG|La%#d&P>>WGJh0yyB2q_%SqgyKW}{L?SN6I)mchN9N9~1*Jwcn%uDQI$wv?6(>uho63H$}|H~^O^-HKeEdJNGA2xW=Ol#%0H+O@73-iKfN zj)(Z|zKdXKA^&T6{j*(*e4vhbXJZL?I?0Rt6w{X7M835W5gSf2|BS0yv)6=qYuJ2l zjzBB0MYOROi%XX8R#AG{-7;nf+3jOGuFQi;?ss19yd}`^XXsGg=U>%;BjgrduGuon z$wYtdg$XDuC8{>~uz8iq-iN`_ax!Jj zQ6PvGoDU;_u)`c3GFQgLAr?CqSU$n0fwm}KNr}RH#j95=no-KPk?Bz83? 
zze+x;K>RROI&dN~T3b9hk23_+-d!HhrpD7}4cYKW#8@=3r^C7Ooi%Ir*oiu|SlM4R zKm5j3W6=A!QGRcOS zgXFyAmBl)dsf;_VK|`Ue-CcwBI`d+s+eLa419ab^cPari_9jaYFvxRL_e^Op)@ibT z<$*wKAbOKNs9kj2U!ww5u?P|;lZ_$WIFOnYKmf{17x0jstXyOOVmFhX+~CLxAr4AS zzO0M_Id^;JgPDGBmjQ4G4lGEfH|M%FHaFOSpgMK!N31UIGAr#LSCSJ^*PC-25{`A! zKkhlZv-NVsPna5FsXME}cE?NFwXM5j5{HY34b%-SIay=7Wp?Ju~Jh*0wAjnB_0USKz@L;rs8-)Vxpp zK|-zWYdV;!6_bd^_gDxEd0>Awa?W!*yK>AiUkUUj!RX1@OeT5lwCrSsp^AkLbEk<+ z+K0pg0Z^8_9;^BIxFkcWqp2))`%4cUgOvCi0cFMQgdcP=l2I75>olQtrrXk)b5z_DXYx zftBOD1U|JSIDA2Hf%Sgl&6xC!oUl7=>51~j@ILIvcPPlou5;cKV?C?CZ+RZ#m*ZGx z9^3$Z69@L>W@hmiubbp|8z)nx#hGHv7tTuvPIs^shS`yqX!Bg7*3QDEb-gC0lv$?r z+8ot{p8YcaUAuVCR5oFm9)k_nhN+6W(S8xKkG@OevZ?^D`rdUDvP1I4vw<<9T5d_W zx9bZysMaX?95eYDNEIq&(E{aMC{V&?kW`TL*gXAGsGg&$CgL@rd$C1fu z5yC`L&cCMx_VUCuW(jM7PQFLoNdX*P*s634dHTzO{G>4j*I?SvfB<%SY zC6|0&=WVwF_uRTt+v6o|1p0utOrn3_0yI*Vmzbr(%Vzd8<#?}AxIalhDsH^Qf`Vcw z&|ICx#`PT`;HEgs|E^gZDrYLcPya zS@h{Z8T5H|vqTF~fp9yBL|CQrj@|ZBGZr*YFe{yR6yBf7Rd@W0!mqJM=^ZO=i5g9; z9aPnE=DKIp?T7HO2EOX4Gn%B0ori0J^Fs8z2Pn(Oz z=i2$P2Jxh#CIYtxZ~^D;O&Tz{InMz2m8?6zKkz!e|0;%p7%x$)aVfK+RU)H&XPvLg z(r&JZ_R$NlE#4Y$ZZ$3W9ABzllD~jtN(h`7kbr&f#m!Qn(i=*XFA1Yj9>;7&1eBKp$#^~0*@T(1ypNmP?ytjD zv^Y;*|K)l@<^LYPEByDm!uBBG?u)qmqGoAWe!@45xjloNsMqX-2z4g;A@Z~3byrqzz0vf76DpH_;!2gqt{2UTpYQ2N$M$O|J;b~e4^RX>ne=+Z``Y3p2~1|GbX z&zmoX)klb^fZIPp?${ir_1Yce&Pzcy=B#?PxzSlTICis6vVBDgNgu9`j#&;64Tu7b z=>&L0N&21W*VLaZRmo*V-T?~wyPi!#rMKzeDth~8A=-W+FlY5*vb`%V6%1USEAiT* zW*x)4rhsD%^zuF_=5e7`0b-%9vxA~H#BODhs8l|)iO`{XjKy+GX^Uoh#q_FUr&J+x za_46NkT?=1==$bXsOEJKQk5%^2~UhNsg9Q5h*snJ)6-+QV5+WWebIFAjI-Yn0T0T+ zJu>SJ11IJd`P`ruC>>D5y$&9Hji*Mk=&|u6MbJF+T=Z$*o$@HHV!Gx6<#of5ch{#W z7?@lkyqMZ5e=3HN4WvMrI%0gww9%>( zQ$L#}sar~lz>bL{tmpA7?5&Pw^>H&j5!VVnoOIZ_(G4DT95b+F;S2iaNvrVF>rfxcK}(8sWS2Ji)2nmc&tvSY?3QHy^*9*_>AnGspoJY|*LKsa1AqP@MtqJNqRj*%Uw|lhRi8s4IU! 
ztsXH2bgjH|gq@*{`Pzsyp9PUw+oN#$tlhGo-g|%j zuP*PPJ$F|;8Q6kYw;x4sL6z(U*nY_Zj^|-&5YgV5L9EkngHv`^y;C*_zwJG=vfsDf zk_mkywMpeP7HTaCv=9kQRI!rIN=B*Ad6fdjRsyibI2u`+Jh}=(h9(NqYLSjzeT^IH zowKdiyydsQL^J)G;_6CPIv4vU-(8nZH9Kv<#J9S=4YhD_k+ap=9FKB$^tf-%_wFtl zs}8i6EhkDfm&HSHbe2Vs2CQX^J0J931Lu5aerB6l&f`g)%GO?#+9$ml6Q$6zzC{nM zT9b(|R{5DA7jPU6oAA?Vtv7mY>e}^|`w#>`r5d!VY%hjxD2I|1B-+6bq2|_6A%q$=G-#&NK#;iMobflY=Datbhca9gEPSe+OfA3zh*suzjnvaEKz`^b z9|AtHL@=C;{gFnCeKFAo0MXb7<{Px2IE>W9tU=LF=!O;>jDl;9`zYgG;x2?Q-vhdt z679%iUw~u|o1R-^jW&ojlI1!n$UqjXI_+dEb(FdZ_h` z>fq68l1a$%W0&P^EG~s1pNRn<8Z@!w3Bf>2aKmf-xAhKq{hE3W6e70b9za5S&I9Y440V{dJ2$orcSObV z1V6=emqI6Wybx{#!Ue7zcezKKPmDbgKa)%G2mb>W3DrgZctof$l_y$4c5F56EUhVF zXxc^uFQt>4U90ilKub9vwk=y*_$jNo-2ZgDZ0pNv55(p})A1((m#ac&)uBr@#-bbk zs1Gq&MApO%B~_)yy|0&XcERCh)nt3omsart3&qK)E6LifjGapQKc-E1t1n>s7f&Tk zk6L>8hGj5Z;VI!$1u8Qi9GBrPDYf^Z1@k?n(TJ0fq>U6MlMCQNS?MT_*klqex{PZc z(;De~Go5pb^TlL1MzuUDWiBjwO9UvaAZJ}tDZdo;063fA6fUbMfeULtnf%)fl-C%< z%nfm1nDP*MpE^~JYHEY+Z!te?a=u)s+77pM!c6Ukb{Sj8=`rc?$l)uz-7x4>g>Jl% zPgX-JlA?`vV;qnnw+$s9D=UD$qV(aErMnjzkR{fA-0G5{Mq)5h3g&1J?J}&#JSF17 za_RwcBDD7`r^BK(3y#~}P-PMDpg^1lseo zES47{MX6FMc{CWKl<2$;F?a62DT$+x#Z?+7;GvUl-f1AKC(u5ZH#D8hGgM(J%~VbH z;WiG37}zZO$m*$5wkCrj&v}AQ%w?#1ye1OprJDmhh`68;w80Ygqqo>Y(`lR*`Q8!=6YyM_ z_(GXh*K%qahr=a9A8b4^ho_6$ZfcGjL!e7e8_%rM2YuG7>oiIe0KsjM#UQ#RuUoO& zNwgpQdGm5|iU_QT{&kgJQL1G|CcdRmDofByaY_0|Njf>dE6N8&mU>KQAd3RgakISx z1$90wo2MMkQC{Er-D?4syj?UYG5-cTtYG^eHbWeW6h>y_Xudp=PGoaoCn+fmpJ2!u zEK?+z*p6X!E~T+4Yf%~&?WTc`@X(~F1_ZrMQE$%koJve3Y-0||*0iXEHA`KTGjFMW z=0^3=p0#1 z!fLxSA8WRV&a8pF&i5O0Uvd_&PIvrazmz1%!JO|QauU29G+t5mJKegi=R9;e4VXf$ z=_r%7Kus=|%WBe-_I>Q@VCRF$-Z1^n1+VQ!mzcjE^8Z}^c;u)!f6Au!F3(q~T5c~b z_E0(ty%4poz${t2lP()ToO2@%o0%L>Cdtc{2U1yrmC7j*s2Lvd+#go0uad%b~N_7lGyioC_)32RC zZ>8Va*>PIw2K(0#ptH&Tc+Q|z(i}2>%>@8o?6!m7^~YT4PQ&rls{j1k%hBv2>q5=rBE0oVL6NPD1HW0q^ z2abpRbz8Nk`%IOX2_Y1gWj#Q$S7N?Q{IdjqDWN;-Iv^TKRsk1ArB!Pd3&aohw|QWc zgJ~4_ikKlEILF>+d92gcw;1WlKpBHMttO{s0j;8qC`ruO4!7*Ze*1XB&+m>`$EF2u 
z=NRT1Y#MK?wCICTx^m4TUzV~wz`-uR^*mg%>pm_>Oi{i8K#Iy)-Z8#%*S&4X|3lYT zhef%5?F!P}A+3U>LkLJCAxa2Hhch(NB{>5K(y7udAkr<}-HddXbPqYe5a-={yMO09 z-*>+2;t%2h-gsiIb>Hj06Vb?U;_nsVKT%Ho4m7iAXndzr+(j9%10=NXIPs6h{%>s1 zy6~4v#KN~>ejvg!Aw**@eHls5cG!%{9876Zo5CMEfT=VM#0dlw&GkE_`hr4gt>-uq=agPi2LytTAG@Nzy{7fDK8JF+P9XqnfIw*kI;gUi4x6tv|=mdaNruRoJ&$@>ScM}4i!czN5Q*n^ zr_oni%})B2-P6Wplv?y2yk5FX|J5il=A7~G*34}27j;4)RS$rar2&TU`qZ35%Ts_I zrwHiUxZA0(>%CL9+ncY;u09X3QUXEWE%)9QmS+qQulmII#oSdejT#rVPoIqp|5ZyZ zH4S-{@U$3)Tu3;+MKSedwNM*axgE^g=|PRVdW-Y~1^^e-D*r*n@91hCO~=P4{N7i# zN=N2W_U(xlm;1umn0P<;=iHOui+CVDufa3mvxl=)m2SgQ*5mJMD*9tNcvR019|3Ko zHKSfw%Y#0HTk8CPmI1%(&IfqM+cG0Gplz>WvJ|3FoP3pW>%kh9Luv_6jjKDMs+}O} z?`+_KBKH%3@~{5Hs~=SCtzj9+0*ujgfP@K1q>K#*;@^dyg@G;*bSLXIz@8zMWNl!e zTO;Gy{93PGru-d5?>!Ohgw;GQsb8_H}* zLn#WC()zwkCkz&bts5hr>Zl9Qf2Xrj+=B zh)&$qq8lg=U^bKPeL$&K@gZrV5uijq3!`wn-lR|Z|WPMk)8An@~Sp2a)(qG#)Cm7m7fIY_o9_ng;pi-jQ{2`SXW z{F%RngnfG}M}k2?K}zw3(?2ASnVr)=ivk}9eRS?g6B69)xnb}8TBxDMy{3o39X{aD zXU3nTe4Atjf;XQKEk+{R*hypEeFsqiNyQ6AtA;AG^kci%Dzv3`Km>c#f1cv*ForcL z#z=5*|2r*~ce(ZT^_tnOqgCPGYziO5Xr1w!*0GAs||wK*nUlNzAcy;n_AxIk(Qd zK?JjVA1ep?x$WCecd{bHU^bZ>lkx!d4)AaL4 z9JLEYY3i6$CuF|6bCEl7lb zJD3$BeIVV@_~A#J8n^=7=!l)<)1&8H7MWFkxc>&wejW-R+jSZn9@~=q4K7N{JapU` z)VsV5&M=!Qk*I)RB3iQA(I_|wRGa)cYdv0bfQ#NoyrGuW_Pw#oDFQcX6M9R0x40HB z4K8`ut+(2J#u}Y0*E+CBC-x%v3Gdtvr|OTlIR|Q4eGq>dZ5pgOWx`FUbXRkV@SVZ> z{!fFUy}AX8se$aCs6Mg7AeGM_FPs72DOX!!3swpk@8f;!E)RJ6-Lp^JdKIq#P(9~+ zn3S7B=o$FRMBc5C<(JgmaY~WyQ9~GEyf&EWwBUUD**eL& zN!@JFj0JHX%SHRRWrFDUZ|b_y;h47BLh85}2q#l!Fhv)CjQ^2E`2_;&cZEn&67U&{y*t8>Yi_vR!xAHj$DwF;7$#tE}U4 zl9RiHeQ!fD=eAO|lv=oqPXQ+Um2Tqvw}^TjblM%P!(xLP{GE^MZ>Px0>795+upaz@ zn>#JhDVl$uOc>Ao1Do1gp$kZ+@f_y2-q@p7qg#rKh&WsCG}6RG<#26GABUzjxGB{f zz;~tB!SX$kcdur}M-{z3_$m&qC5)<@b?0V?O!7IkCqRa#K(!Y48$TJ*Yy{5`ECfwk zS4yU2SK3S!keaV>{eY3A*bPFN`Umwg8d5AUX%r0WMzR+_42IB7oH$j=T;tXt0;`wx zeMQG=c;3@b6M32jG@^!a^3|KdRG|w!t>4k;VqXbZpXw^iKzt1{F%oFKeg~z)-CRBB zi}lZP$$FkX@9!8ZM25rlodoyj3fcL#GO-V1>LA0FmNs98a3FWrm2LIiKYlH^3&-{_ 
z9@=W39Ru77>!cUCd{(Vv3tpG|2{E~_hg+bIpyrJ+P*W6gjXWBK18KRfe~a56UMyr3 zuh@WC-@tXUNJ%cIMg}a9G!Wik|6az2BOMVx6HXLOU(O~gDu?f25Cf_!v0tpKdjWxl zOU>|Ie88hA@^t^ipK5(Ca30311Y&#adGX~u%En=b_I6{ppmg$!!?;8QfXq{J`A)@r zTE0Bd+$1_z(SG2sbB88dO}g=6YhB^$#JZ8` zKQELF&o7$o%^5}0{HCy7{qY0Og5UsL?1SRlDI~{XxpY>-40+{w(|B&n@j<W-G@YsUdxH*{Kh+y&GDd$z|rvHz4|V7c4*K zZkbDAVGK@T5ysUl`oPooV}h`)?rvr2vl5zqEwgl(_l-ilBwnwqrz~oe z@nvkyG!zC?W7EaQ%uux){sP!;VnbhTqmHGO=yA?2Vf?ECe$+8@%aQlt3^lIXvl_fE zmdq&ok!U}mT|3jIJGEsAGXmL-dF>o$dG~lz3D=$RSyNoF(gyiV0M%A0*ln#>9qtoa zv6W)qL6AwJLh@Ov+OFaFJZy0J5-8dPee9Y?q6KBWxp;5g>b*xtefLvyk1@l-;L_O~ zQ^2O4(0{&qUu=G6QZ!(pYWKE$ase0TJ*b8MSK5B1r1urK0%PY47}U7e+kqF9=S3KK zz1$If#``vdA@N@#=F!J{!J^%t!duz?;g&w;MqlE-7~TvZ!9fl4=X|(jWA*y5H9eue zao94IzoLy4IleAdPUn|h)-$aU4kGx>7D>0o-MQxJjc}wyaoJ8&@Usky&;HPA%`_yj z0?fwP-52!~p}&nbRHyT5f_Ka_n(u2F=HkFP8uNVldUOyYf%mVeH6%%NyBn#g33lENJzjjEyX^{*01rAuCQoQ>CzG_S`}w0iEn@aN(WyU@e=sW@fes2 z6r!o=&6{ryu&vxw$=k{XriD_+%8WmKPfV_I2GH`+(mNxka1RLF;9F@*OSSmqIq7;N zGWYnizA0s!Yra-tvL;ZI>pw}jxIO_fq%cPfU#yw-rHaH1XPnJYbJ{OHS4cYBy(6T- z|21EuRj5ryfOZykg0*tg_hgRCsj@a|M(8F+FUr(*W`{yxNzMQJ0`o-^^59irDNFa< zi6TuU;zQCJ=9HsHV`v2FE^H*Vql^a44x1 zQI^5gzjlPE@MoW0WUZgwm3@#k2{lb^4{lNhtYghP5mUu1v0DN(?Z|!irMd8J$=li1 z=C9(O(-|p=e;GsHj^?`%E7Zv^ftoqmr+``by3POctenL^QvGN$SqS=Tgo=QAf9a65 z1)Z;CvlY$zyUN@XZekftUNyqG)SDSig?C^s(^!uH=+XH0nDfq>RUd@8l+`O!+$5te z5QNj@x3@A$+_xGnQ50@eK`=O7r++5g;Ce8xexMbj)fz_0^Kh%iZ3NG*T`)izmyaaf z)(xt1CjY4HA=6ET!xHuPO&5?}7YT9EQX%NF)3g$g_utj&BnQNO^nRPh|G*<;PtiP# zmQCdd8*5F3s&1@b{=7jE7bG1_K9>W1#&N5qK6l6F4W5EOn^I#P4;7joka>Lc1U8We zKKl^@{_&1)rSkPq-e&a%Qq(dwVEXvnK$Zc7_(HhiGDSOBKlQnb&00vMhUK0}Q0lDR zZuP!4Q~BnjNIm|@E%WLL4S_A-UI)Xh2ECh9^JZ>LI%cbEOhdsxZtoX3%%L6}Gu5^P zvU(*sBfJ8(%&4_V$|S0b72_JXM+WHeZ_pMk5TXJ>E+27X0Z;0f^~z4}G;gpM7^_Kg za29KXl$<9P1exqVQ8eq1RY&--957mV$#WOW21nyG=Ed`%hAf3WwlOPS0y@t^Q#x&X z3+>e6qY=i=LEZggA9kKu8kX!%Ii*rm%$TxsS;Ta%-E#xn9UHy1a*6Y2I2LXub4mVf z3s`0Y7KCez--+Wt(R=8pdnO>2l-ndS&W+0uLq3*|7hwAige;0g4(V3uvp3BbbE-$h zPO^c!TFDa6Z_>V5MuxkGMO^xAYoTc%?>zz`iX%}8|x@TZV 
zQ;RY!l8h02uB&}l*XTCCepB2KtWZDT*|mEtN?q#m@|}Dbg&PjUjdakvNUfM!tLL-= zD8F5rWr%xnenSK@BrlBV5^@=`Nby|G%rbR6j?Q>)T#QnZeoQCjlO^kTq9c@eohnix z7H0guXpY~)v3nVaNQxUms(eoKN`q$<$a8h)?z2WQNxfL%B4M`w2NDk<|Lds45F=Ln z=cxRQD-CN^4m;13%M@KYp?)gYPyi(PR|~@Y_R8bXj)0!KF~MNIP0-}Y+jlr>FJF+{VM2N>WVW5 zv`Hg5EaAFEK{U;?hptfVMkQ>oH*%A@r9LDxlJjr=BwOVwz#_3Fx48N9LZe!HYq~wc zz{qEIMV7-{CBFq#(#1PSnZK~fa-U;Cjq3$)kmb~>$o>WWp3+M8i`(dQ7< z^I6LNpqgG3HeIOjT^MCf?LB0^(;V79X(Y-uDdS4Etz_@gxx#yNTJ^Bj%fr zx8Cc4)R54kyv3+mumizaWLnUP!+f3}h1Lt?O3qgL#lyUQSzUI(D^IeDB`96$=oh%w zamD{!>&G9Mlo?C{?CPMW_v!xGDL$*=qyq55WC^OrNfd`l(z(-U{fvAi>2yRvgTdVY zU0a!C&5K{wBlEPY{qNYCi-P^5b$*?@a=y_7l+NgLMYhg_8DypD#!Gie{OAL->%JOb z+Sbs?eLthSbyc?bxSxpo%Wi};ao%-$iY{;ZP4>&^DoX&=3(>lXlp~u%6tEoz|>##nqxdYr%mQ z<}L~R%&HMpbWd3~#(%W{Wc@&3*rF5h!|wR0H4JcjbcS_~_K@}Q@_im}lTy#PrCQ^d z=-%n|{3mOnQPG7)SH_hN;Q9}JHd86d2?Dt;TaSsNEE5-85I=-Mu>UYY{AkD=hn*NY z`fJ7hq)d<9(81+V$wGE(9xIYS%`Eyk6?e^^;zb3#B>*W+{`I*d;p`X=qfl&b^$ zRrmELim#MSfDZIoXkXE4B94C4c@nXS(hLvhrs)zqy+lK-9ooO9n)+Oe7xkkY5Q7@n zbZLH8=oPx+W1l&!&isIDiw=mkPiDvk4prWIW5_x<%-3c{y+nye&I^L^%*Tr>^RG3JA76PBT zUTokQ!Cy}(ltwz)#T^!GjbWTJoia3d*Ss$uxA~>a=uuYu0ruNUu)~z;1tj-RvzIjN z(K4!1A7;ZcC;Ml~)&SG2M0)JKD0rUKu)(P9+J9!`WrW9*mzDuD$3f9Fb`yRH$euX~ zR#nLvu=83k8vybfRI51vOpuhfhk5yj~mJ7Q;BG1AAjK$^c@Sv0;?`+txL9 zODyoyJv#=IBUL5y6@SpoRW9C#Ew83uj(E2o zETq4ciEtnQxr>yZ0Tsm8KuSlSQh(+FPKRZsNyi#AxHZAx<(4K)dGyncPtSWA|%WM>%}#G6?L|VWWt|<6$v>yF8HF}=O!qO?#pAgh+5E}6@j60udM=~ znv=c3?XdiOrpzhk=kBjA{b;M-&GAEKUyiHGi1P!U^KP9P`xM%bG$sL}6g`LMMnV)^ z+MT)6^}dt%k{2a{?NjbcB3*=yGKak~ey8TG{U5Z-kA&n^nJhyLD>16-c&EySrL~jupS=7Pvj;lBV8hj=>F9ePlt*2r z{4J8iU=il>IToZNq{e4M7l1J+O|9{%Y=zOnYG?l2Z)#dV9;f!T{=o09M{<>Fh`#(r z%kCyA(L7@-lH`|7g5%cVuQ8yKKN?$DXZd}u#r$_4Bux~ls{kb|#UeZmEl!gC*rQDu7ZMq+fgBv?FHHbQ1KqcpfioBjWsbUv&xa8{g4#x8o zHn;e1k3EM)AH0h#TX;;R-_H5(IkbU6R)mNXl}{9yi{qLyeG@A9Pa64IngVCp{5Bi+ z@Ul}}hElLVC+=upKR?SeIiD|4R! 
zuS|F1LYE#sX9r{fNUI?BcsNhkmDd4zes_b+h|{Wh!{eRKw)!QQ2_PzjC2R4x1^5#O zMZHPkf59B!p_dc39`_6JlYn*}g8P8OI}CV~ZDsAh211!4<4}hu=QRGoZ)T)eKFXFd64gpEId9d^8;~BW8Tl3qD)PC*CSLVsX~O>h@2vd=o4G66dhX7 z-vBv>U}3+z7Q&Xgv3X-SX@!GGR=-TVdfQVsmX5?@Ja-fxEIwG`eU2Dr@+h=yT%}!(YE=+ zoC%T*xW${1blY1DUYv`VCy@z6`%HpbF8?T*06e=WX)?;Z(J)ObrK@MBS7$hAxK^=U zJ>N^gO$B39vB8w1N&Ki`(RB?gZi`fLZ*WxF^C$Ul*%Fj#A6blqb;(v(14uXtY42AK zyAW#=!K0*)Er8j1v(D_=Ov1s>DbeHkzwin;Ag1B)syq#7HXzC~I_H=eZ~)Yz{Hdt3 zW^phgr2u|*st8njSksZbw3T}UksvHe+p^Nnn{)N z1(y`g5R54BLXB-}aBq%7cCdkvl$@X=0N5(8SS`Tfj?3Sg9Y|W9>VC;pthyqywPW(u z^^&08unUyFz&Xg!d>1`m=ZqV)v}oH=xfxOqae`Y6er8#OCx)~fIx&n`AUe3yXDTg` zq~m}>GGp}L_#U$tfGf>#z5e>x35IY_?JqypY*)+>@ix43{M-55l)k?~Dv#U(a6T73 zR-UD&_Our;pTE?F)V>eZsa|S?rU;c)W8RHrRt*$?d6goXLm%VfynU?I#-vhy6TDFT zBCz=J-@Js{84PB;74kVOmirkg<`;LjgA-Nc2wh^EXG1)6k=XHI35vSuu!$uGJp6mXiPn|^b%@*7PucGj}Ab`h@ypb2i5R^IAKo<_acS0qz`g>l?&WX zYB9||H}!EZZ+d|RRchIg`0q0Qb0|CRlD>M@PW@Enc&+cT!|o(& z8ubA~(6ieaiRKUK8A6^_F`j>};NNcSqYP>P^a;M(RZILo`~3fy7bfBN0!ybK4tV`< z1AfN>1Dnc{!D&&MPN80@evD!oaiKXK{*7?Atd01h9<)PE8G z|Mn!9s{!aQjHjX~;eQ+O1{WBhaV`G*Pl)yBeEiSDkhUcSQaJ-Ny?c7m z;pzDAEdMVn=WkGS`r&8sYEFx$|9Qq>r2+cIRL>o54L^QCrKI*(Tj7dJeY`U;Ea!aE0M_^ zA@z012Pyk*T|@O}+}=!OP=(i|D?Z7^a`v{f3@#hf#*;<4nTErUt`#-ree`52Ho;`07QTEC zz=nbqbh>9U;mgG;nIxifJJ$etFj-1T##S?0_oTa;O>t$n`;5-{{F?Qwjys{$kcu?` z--j~upX6p<9Q`)E;T#6lm&kWrRZjP`HI;Y>|Nf{ydEX*bQTYMLj4kr&%TrBWNCZo3 zB};)D&?aY-V7FtwzEw_syyS~o)x1C_#vv+OeG;316$sjWz#8AV!6UZT-W6WqaMwnI z54k{bWl5z)d_`b_SSlS%j5E}R0nQ~){8k)5IR}8=&u0CJ>VUiu^)|=vuAAVlfe&@i zjM57 zo2zwS2B=1pU5AYw=gV0sz?51%hMRe`uQ(I~Wk$@d$C`n+*EYr1stXx`6H6(ST=TR! z*8A|z;DeSYiYT`8_E5!(cKqRTjm@>w?eNf>`jhK_b_bI$(kYY3zKKE|289coA<-L! 
zhu~|*MmRDW=qJMy>v#FP-H#02ujahDTQ#pxrvtAz1f{C*?9zG z#gH742u|x?#j<67?|}<>CnohYJmH!X@Ywzi1pU1=7e2eUmpU0QzO-YmtuIPseGmff zgI%k;od})F2f{;lw@@J0JE~6IYzvpegAf&cDZ#iKBDc~yYG@6_Pgrjq8S{tG8E=pM z(1bL;j+=9M`#M>!>q3>RWez^MFpE)PrUUM_vFWgMC$vBYI}yr<+oYs27q z5WTQEV0&V%p?UIPyl|y*KySV7UNmLnsi2i+kEPh(HRKzh1aKwA>8&G zBF68Xm4lxw4W1wCWr- zpWl9!-pl%vTetE^FV{Z6s`ET?oAF)#yw;!ePBE69ZKh(8s%)JmImp<$CpzQvoc+K< zzzAMlHo{~>3(JYOXmQ_pV^DP6!3)eX3<}nZy|7X#!`|hdS{(7kzVhx){h{+==(`eewR-x<|~^ zjTlDnJ2GVzv_3Bl)Y}H)nU9wh5@@Hf8P6v0%-K2j?8~pGic-y5M7g5&0FD1F`i)A(2o@sTQG5x(%GY;7MuYa z^w&QU*Zbl$`U@d(kMQjyoBgGyb0BUWd2>9RZT-nBd3Xa0L$nrK< zTw?LmV)#O9;#-H* z)d#~q1y-{#4z*Z|T{*mPbyyK)ef|4x%7DO0mPrh@mUv8{n1dqSi71k~5EEYebP``I zHgV%^NUVbqGn!+MwCVxAv9>j`dNnX>&O$&9xN{>zPd}l+o!yi_Os+pmr`$PQSsJ&iIn__G!}n^NBuPK zASX4-@_vxTN?naB>Wz0VSAWB_<52!AKpJ65b4WKnuq;}nxrLNXk#(tfXTsQK=SXlx(DtiFg^45EBoyn45x3emkb&d3FN}Iw z%q-^00CQ>TIRr+HA-XhOi{$m|Ui8MEAn9JUrg7BgFl29u)$|p>v{vZUd#<5=$>;|0 zEtL$lsZqUA`LxNm_xUPaXlniZw>)}K48n^|)L@{h7w6(`GRhWiV1`O^q2?Lkk`2=Q z%9bO#e$SVA{$XyH#XoV&V={Cm)ubFq=Rz*jH@qQ7Zw%|ema^tJIpW4B%tyq{A;4qw zGE^b^OvW8x#%v>JaTtg@z#xo|2k&Ngr5yJ!$E0p#5}I;fd#kS4^uY(r0e-I1i!+6W zcNhF9i4VR4NmqTFS2=CtVw(ejUhk!aUBx?ZE`JvWT4+okyL_pRo(G-nXu7Y-8eLw8 zJD6>Qxaco+lP)i(Mr&W#S>33)^i<~*c(>%8x0#~*$~_7xopYq82k8oI2HDi?L<{P3 z^P?oK3$bOEJSa66B@C(BbqAnyyT&SRUkF@NpL^>6@D)`c&fs-TX6<*|KTV`^R!-MT zm-2lP6h+Ua(0)fEd^x>2W6v;Duz5VS?%TWR(n8uvQZYAQ+CoWC?0HmI;v)eAX?HEy~g1}d~j{#b8D5OvHCpC80uREESaj$)HUCBNPz(rVfkY^HCoowU=Y z`BcB_I_VHumF&-x>liuoHe=X<#OWZJ9ELAcy*J}TomQ68d(*e$4pV9?3~gO_%mpi} zXRP;W4Y}mkV+iN(*4t?td12nb4VAln8drSp_#*@)gMc=GO*3t&$n$^8@)&;rH3s3Esu}e zDM;hdcr%PaD`Zk+j~`$HWs;M+vW+TbN2QRlMs5Y(e!Pt$9!)GVnh&t@fBh`Wgo(qc zP24~Y&d`6K6wXGI&hhAFxeVFeC1QsZp>cdqyb9@=I*s_|Wq>)|_rgZ6aigM0JOVBR+RD z#Y}7vHtkJmO?rRycD(yWPcVf~pXJ50m3IF+ScrB6A5L&`-Tkyy^<@F7Q(XAIrm3Aj zb>6$wuA2Z9I+B?HK3mSDJef&fmj2j^Y(eVR6QEx%xvPJmVAPLW6tGe!_4th9~wVDo3p0UIC+QlvNl0QETZ-SH*yO`oZ><-{zNIR@f`%a)Y8NZJPOQi z7v-m)9+8|+y>>KrsC%1h+d-$%HWvQ!KL`feH5z7VmV$mwtl5h1+O6u)=|wk!?}~c^ 
zbnycGc9F5W`Q1;sTm6%0upO%G7*;^Hav30Xpb&Za3ZIjNJ6*fw>Sfoj_@}DXaSgH5 z0em=8B*|s_QxM+-X!4|Rib-Q9(+GkJy4?Ju{;3BWp~nr6-&UDc8a|i^yJgYu4_y4!JK@{tReiN}rb#4QWxuRA%;q%s3KJcqj3D4m7pHOd zOEAUp#cxKv=3LTRJofq6>j#N4Mu4GPeioBTq+pCI!Uy>^#kbHV)5%fzc-0*FQ(lQH zgm-;u(h9S0>B6@z{-*3*DPBHCVj~em9GGauy8m?M7?ojT-qB(}k!|BO9bSm%9p&+2 zCrf;U;J$}zqvVI5%}*UToKA|3Zyqt6)697@GCrZ<#Oo-d_aM6c-iXE*fhkKd6k*7> zZ_cZFWLU^A{6y#WJ{6f*4L|T050n2&Ml) zTgbe*aft@P$LTS;$t06iAn=|b;~KgJ4|pzJe?u`=FHALto{K)4B9zr~YC_BQ=WJ! zZ|ai$#hb*;o!}RSJ(b-A1HqPzosSKDVGab)rc$6YVWt@xF?wO&CQ#+G@IYIFrUJK( zw&&L9_lE%iIOp<{ql4*a9;fq5%Z$;Eb2W;ol}+=)ScHTsu|&|lDAV6y;hRj9%grg0 zqFD)GeeP`H4BMU-=be}C3YQGn+@#m|*tSKZlgH(S=qWgPJ2A~z4icR;2`hOg_+b|* zOc4<|Oj2^)jV@+;NUroeYe+m0KPju)@*$qvJ)U|p!i2z`5E$`M{_*Gu2^*yX_#mFk zZ`k*sops@$ED`+LzRtzy(?3}xnJ`+2n4N7F_50;$RQ-x?Yx`V>y#+fY`>KjQi@Kk= znc8sM_`-Dj`b(aG{{=Ca%54O{LAtRG)K}(>O|)a4cl!xpDhC=*60&>1sytCq;GkCe zgVl?L+^Lo~mfQ{T%ebl2H)NgP?Li5yyQ6Jh#_evM zv)MM|%{2`j2z5YPhd zS@mxSW}cu^R^~H$bTnz5 z_|9GFR7TI%Mpw#2>g4Ti3HIK87Ik$tXe9?Th`NVC%zgL6w>y(cxK6452tdlfLiiz! 
z8fVX$aO|XPkkhijm8`DZ^7ph?SVZ>sP6plnYSep_*4-3A^RgSLoCLpV|8!3P1#GfO zwWamJ0zn~-rki(z_Gk$|-kjz@53SyZ#!cfgCV9MRBdNsVJN@0BbsT|SYA5ElYaw~^ zK_Pf6a4?9_+Det-I|~Babfk$`zw#>WqAovz^ zcS`_@5iD3*CvPDA)=92`& zb<2!lb4q?<>$%(vouAltZFogJIFMt>$~PF%FM#)=)_AK2r*+?aqCj(eCO8@cv?hNh z`Loh)=Gwm0V(a3QL(>eT$LW}xBv2W3lm+FHJyQ<2XA+bSluh6#+z&8AJk8u3mxS4v z_%e)0QV<=XbIb&$Z?O_l3TW;#c_qSDno}+b4RlkL$49nNgKh_5B-RIYb0`tJl1Ji< z*(b8fW_CChVr=tHbcrdMzj~q+uB9pHkc{arwb7C8C06)X3ESAcVqVXVp(1Qc9UHZ` zSKnub&F4v8F$xDq-ejjs`pPpsHt!E%!6^6DWc2$}=9N)cuI45MRiEnv>iAZW39E)_ z#6~aDyShODfnVh5=k9Q#4+n^yI@N*{JOzwq>2Wjs0u~5LAKR1(d>c+w!5e7k>mMWm zb(CB>1LtWC<3)W;oz^AP?pBW#vv?VZr;LF&-|YWd{(RSA6HH6`xNM?q;=9G*MRN(4#~Y&{M=@KF_xU~>!>+evjC)4UUI?#y1n_!lGK`l_SB@$^ zyQMU>B-y=IZm;hA^_G0oqWVGn<=OS2ox!88KbTn9UCjWiQE=fau&doQt;PJOV5S4U z(r?SR=lEJan(68r<}}EV7U)oCjVvvK-^uf#jYI5#EZv;A^?CPnwVL68ngn^|J%5*T zBtd@>n=8&KL3=Y8i6zH;U4}NsN8#F!Y(c*~-Z8Fyxrg8(ZAzW5o(bL;G{OO4*!?7i zgHQ-5s7Q>^4aH*FZSxXuOfFw1ieHL8u`#9mnM^%Om*zN_C1|Qt!J{V01@Ej0n^=^9 z5M1fjl-%W<(R{m1uNA(H#c#=I!6i+Pi7tCd>&8Vv=n8x|95G0wXUx#(b7rv9X zHj_>9mg66`Lc5xiYhE>@jPv2!zcC-12Aw*cbJaDxP>6czK_c%)7)edN)4C)cf;77| zs+qF!aq1ZeUWiC|0Oq`Z^Mwc`07A2CBHQGJ=FO=wVSMhzpcJ^D1obU4#gd&aM^NiD z{XT-gc2ABnGt$Zkn*woY^`t}@4p?{mrus$64vqqB=w4md@hGwJz-U*7fH;JWh#`uG zjgj`zv%9?{7c@A7d6o;)?96QN_pIh=UewX=EB)k6yeqA5n|>{pYRr;=*iVI!B`%V= zir+Sry8zz)W+n)4PDnv4Nl3-I1@%KCT@CRqV(&5tFQUenN=iyRli-RlePuaG_fR5L z*aFMd7w$(a?&ReC=|-KceyQU*6^{@3tFrc~NCX5DQ;@{-{tpE4`4{e*R!2ev;m(~u zC0*}YT>=TrKIk$$#XKNk+Wx)|not(C+&2q%VKsq%%F*%`#Bc1wh!uzE=zR+eEh&rs zPJ|@EYGaG}*hkc|`N(tv#MdSioLE%^PJ8gS*3qXg^EdRxya8fg)!L$xN!h6h=qWX! 
zlJ~G~t8BOPQ0E2Zp4%Z#`Ln2J{W9ff_+x>FJ3oc{MM}}l=kH*negQg9yMpRD6U?NgvUpK=Zm+u9PdD{k)jYe~5k&-(yd#QcZf`Nmka+P92P}I`z2rmV5WuBxL3OCf z{CHmprzA+7*L4;d8UDd`9-2rx6#|Z%KtB2XWkUz~ywd}wK&V4b+e-J81~|+dd0i`& z=gENgp)+r?KJo{dTizgJ*Jb`~PVS4d8eLQSOn~C3voB>@+^rMU7moPz?%Xd+?qmT>w$PqNqWQwB=aS|D!iksY`F8ZO-l4=f=x{15b4 z>xv^i%_qQZ;^hKDos?GbJ0j-zk%bOtpgpj;Oq^u^orhK;BwJ;HPRJftv^#$0NxhN> zSu05bM;+HSnjDqDiOz>z;&q>wyse+*$+5e@cYs#nx{6@xs<&XLV@LPUGhhp49>UKbtGl$2ef_J^4F{B+| zl-nGro>pHet;k#|S8wu5o?Tvy!^86RsEn6jHPa`OJxV4mFCLlCMu?dBk|G0&_??!F z+|+2U@B(ORhOS`gg0Xq8E{+z=P3rBLnyJR>@dA-N9?P(rUU!;jI~gJr#4jNPNIqkR@X4jPp@ULx2?xc z1W=6O4Im7XBDe!LykL8i#7e_ozE`uvY91B>sdCLExpRk>KdgUAPuD@fyw-|uzTz&0 z8^k)#&1Jx5Ti|}Zf?8E=(a*wb{L1zwox@-PbaK?Cm{8wwJWhABDCehU1%A(>4MU84 z*e3;^L@<#;=Y4_7(VNVEhnaefLsRkGpeP!fv`Un?!Qz~prSZk=Mf8t8pm>x!b@Wie zbJnu{DN<4wq?c72C!rL2-{VD~u@vv^6`e!d;4FtYpT~5AD7&P#lXv`eqYM>$IaUx$ z%6XE2>yIAaqa1iouFs_I5Ign@Ot7v)qe!<`odTp?X?gAtmS>8_1G!9q57c3sq)muk z&-gdhDb0AoJL~ozjN6J^^hKXkWb63nq?_#^I+z*G8J+Sd&u?1!!;C> z?+Bwj%`m~yh#|2Jb~J`~#3HTO2$>=8Oz*g=WP-#BeENsVj_2NbN?>NGMfIZKj@WfL zh`OK_Vhs0DR^vDR{Y2Xkbi#dF%<3gFvJd&fz%!0y6h9tB?3T9ZJ;o*u>N=DygI@Or zK{+|877kbP=sP?%Rvu(Qey7*qd-&~CNRi)`-5f^GZ<_n$YxK=g=#v;IP- zC%K}(NtO~-fj{o&`X><9AY;6t?4vZ*$NNa7Q>I&z#+OmItp!xRgR(TTbf$BPg&f1A zbNEM6%j$m7q?nGjP|@l+GQAr_`_9ev*zpxz_8C4?fFf+(jv#zhYp9^)O|f;qfN><4 z38*NuQqYVHHnq%@1Nq_=t9>7c>vYfpz)W|ZQ-Rskf>Be>1T~ya3hFbB=E2uZ_Sv`7 z(Z;GNrgaGxcPtLgg>yEuVa~PSru2Q=v|uO%%Kmh(@#rC+#CDh=|D( z(@qu?nJbBfD_vZ32NJmKBn*V|Sv)Vky@7hm$K_7NFfckDKFB@;!27^*aAzKPZ@zlyzO_lX#+68bC8eZ?&50Spo*To8=~ch0q5i zvyJIlJpWZ+7U5UQZs8+`H{4qx!<$MrVTOWVmX2h_0_S@y%8V_r? 
zOsq3hL(ocBA-v#uPo92tQO!2$9=PHn(-bJEN_taWUo}s6@U>xIxy}V2#%v30?#K>n z@lhP{VX**S!xmWlsnqGwpmT;jj~^yXhS?~Cd4~7yJ=H?R2L0-S5j0CcPv%c$-+B1- z2J+cgfU+%e9=@&k0j0y+LwMFSrF%vWt63E#ra!JGb`dU>wu=DH^lzdIbBLxuC{>UT zE>!bOXa(wQn?Hru-8k)MfE|^?^d8; zVvxVLmLU&95UULhWt4|cU=l$3m}IcUYPDWeDpX+!D%Yebs0MNlDi)#ge~k297ip2T z^pY6Uv(|1=nhL)g^r2}!vCgn8f}yH#m_)K`Bs?Gjbv0J9X+m~gcvtLK!AX&CsYBLU zE`DvA7G|*GGo{jV)1I7^^~;&b8j9ah@^3N zQo4M_GzWgPah_Y!MU1&c_G}hWIQWo60BJXHyX095#uAPojxfeX#<@Ns^^aFy@-$15 z4_C(pROtks()gawg6fe^7n1OH!dXla$NO-4=djJ?5G@| zzZk)>lUEcN=}Sz}O;4P6GCj-6dT@F7v1-TiSDqXR{3{+~4@F1(!`3gN*ZY~>AqL%a z^2eHCZ|qVvCj20$wrhvcxUFOy0>VFtlMi?{U)GB#VtM2{K=T84IH9{pSJ_s44=yA0 zU}5|?OX;qS`eUIq9jfDGxFRWxyr8iY%eWlxaXg*PC_$b=y`CF~!l_>Yq0;>c$M& z!eRmLJSs=>DE$Eb~|WPOS0T9qcNUFLm^SzP9U{9gk9MK5e`C z+cfcDmimtw&2zkIMsmaGIb&Ss^fblE{W(;EUg}n!QN?>u$UHq)pm>L6X!X$vxfF4w zyZw-n_a$%Bu4a?|Co^5olJ;}E>PLYX=kF$wYd&V|fw&HZq9g?EKK&e^^KyHRQuxT> zRWtg-K_{WBc&xC67`K#(+RiJ_$RybDQ(W9*EW@FXyJTTTB;zFUJ9GRi8`Hi;%;R5UP-6VgFF*y(fti%fJ^&y|L{`k?OXXGozO$uInL z0@k0h?yCDm7UJ;DF+h*p%?7;5pSrQYOA>3CSZ1g@zG?|j9zz*b*yuDW^_zShBR@%R zHQA|D+QvMY5(iD}4|_xkIRE*rZh9J#^Xrlt{q)D69bVaMKMAyezJ(>NXc$ePOuEbU zglR#A`vR8_^t&?(8nxmbphgr9z(i5tBBH++gf`FQtCkkOhfhxQyJGq0C%hj(rouQ{ zN}v03!4GUMLo2jgv7H1WZ-}J-*FS%3kYoHrPCV`|;!C6in#)l@g0yWwJ}E%Ai`Di4QvzvkJ)`$`-GG<+E7gc{eY` z{KCN-Xoo8y>cc4bSX6)yyT;H&dlbXYgL|8Rb6~Iob8~iK##77Oj5n}5J6szmG#R3g zmx_V-44S^x%Gam@Jz{1+{ZCQ+6E0nv%xPq5{Zsz?vYG zd%L_*vx&AwuLYx|$C=amv&$l$SotwK%r~KQ z=^y7$$@2eHXVA*`((ehGvbk;wKJDQn(jQ1?Dg-*EoVKg}i=8nV?{93l>48Z&{PV35 z@B$^!`Z{3%IAPYn-L@^|OdmMO+d&F|ZOh5#`IG}+d#U*ZHPAfo&yu^BxYf$f1C(_~ zF3YT!K5>auDAgaVu(Sg4nh?ori%>w_6a&MRs$B3`wm{b@>uvp*Rt-Fe5%bbxu3^Uj z*hyF~uFA9}FvU*VDZQ>Z-}VjJS|5cu9Vn_4E)twd-#_B9SJ+MjMOA7!lV3ImnQ_^ zNDG;?JWHRrZOpmqv=NFHQH0~*O4>9RpZU1${wa{ghfOV5Uz}Lw@KTu!W>N@F6Vj^` zA;F)X8(z7_fF(SA44*ISSSh855gtM-t%(P1hGDEbHAW;x1Iv<%YQd*%8HvuQTD4GB zI*ls(5;%LnJu=s~XNPXD0k7Dg@Noa)^kCmQkykP9>3Q>nMU`a0UA+=y!yYmolj4h_ zE3){baZ~Tb4JgdoaWbY*&<`WRCAREoPJph{TEBtaRC5O)1S~kg$Xs8_@Lp(t!QtsR 
zG4pF>)1Jlhe79WcbU@JF6cA=`b{G)Ks`sX4*|PRhQxSTqhGug05&D2usx5=vXyX!) zgLmj$feo@~!p2c3e{R79ONVaj?;6H-%Mdzh=D)jE1i*+e6U5r*i> zvqQ9Yye>}Dv4hi75BQwz4xim^!&MHBRcm;S_n)mDsbjXJ-c^EXhCNJlUw#r4(PJo# z9$+z?8_?tAcDm&1Z#_eh3!(XEOs$jrzzUlb z(lwr9Lef;wBuy$lSm|lA>ET_Vbe@2JfY%I z%wqm3E2BNI{fO)l-p)Z-o>r}U`vc<~9)|ji^COoj3g@XLh|6{rv_f$8QD##fh)JBv z{2yG4%|WqLp~9NlxbK3rEta;78I7eXgmT7ilOQ_+EYA~vO+1Ehhhicx$wb&?$%wLi z(D_`7%bTS5_4w64U>I`??DCii7<+4XI zs>o((RHa{$XV$QWDS;k2&sI~QJEX`d#ImVWqpAQ%kPT=!k0%g@NLg$SnyvtE9$#Hf zPdxG}8FE;u_DavJ#AZKy&3%H?W_g7d;PYe2uhBLPEv70ZHC@azjoE&8kN%Zk%nFh> zlWOrkXKM_rD?=mT+CNytcz{M6!>T@3V3UDMuPoxk`g}wXg+l7#`IR48s@k!JtjK-u zeWQr&%8r7y#1hjqi_0^v9Vp+mtyyDxy5zNLBZboSQB+BdfHxAhAWxopNR7+!dI^xK z6U$QtTJ#3BtuyO{&C?#&ZpJf7DX9_{?cg#;pS?eo^vyQ97B_Ll4W&z z^lM~#aOa|AQd(AZk{1Ryz1%cE?FwB02`$FksdgLpme%s)sU*J*J|gm6t=Vf9d5*A0 z>#1DmxF`D@fhl+9qZflODr~o*V|Z_lbtw5Y`^L$x5D~<&98T}e|J((xZae{>`;J*- z)Z@WsaUraDN|4~8`0bf-CS-(d72+@S^6Gl<$4`C^kE=2^y*JS-_z0Ovg7>oKQV?@B z@EO&q8hvZhMufk(3b#kGWCVFSexUUA3orr0RAgxP%t;Y(4kb=+C+bXPOmCVb%g>oc zJW^uCd~_Fzv!*#Rf~~0sB*_&7WyDpMq!W1;X2+5)_4+Gp?y;VyBp$jseIP~sY}5SQ z+}%>nO*)ptr!pQy{fz-Bbppryhc2)Y_DZd}%2~gUckf|{?My@4J?47w2(H9ET0pk|8VM~4VKL3G9SAtY_yvgQIuGi+nn zWaSECpIRj^b`W4Ut_o9co))PT@ea%@mTmI@T+m(K>Pv&KMG|B_i5ma>pa&UR%@I^= z?XRi5+bhU~cuM&Ao-}h6lNKqyIEE7mH^eYNO4;xIK?7Qkm>#vgK-?1zu2=f?@aXY4 zPA!$>0d}6s_C`#;)t9`wT3-oSn?6axugX!lG!=ie#J{lgR5+{$dS;4w+hsy5X00iZ zao)F$e0Lv0=0O{L+UL5v7M;;$fZxKw5An)G@?IhMShzY`rPr)db)&xn?`KQK z#5#XJ2VFN~tgE}o?Yz`vbuSFiKdi?FQ;9Cmj|%K#xQ|U4NFs!uZV~fB<||%0?b8OX zuYfLvW>UxRY3qr{ocZDwcs*vDqfeL9iT`(@)9}xANKzqe@HpF!SmjpQxu`yk+s=W; zvjl>P7ugxZ<~J+SO|M#7-h=%Gw3QhaU@YIjp z4Dz5{5dOzX5u!di#0D%3rCJFtJy#sI5k}Yfi6uHZY~Jl++?V_`gm7g7E@RnK4pd&+ z7DdTgt9HKh>e8$SeEL}&zrE3)axRzP*J9^XENR+f-ubJ`>Tg*&*6iDs*ip<`-5^L(GH&j4vxxDB`k6^&h7Wdj1rA0%p&$p zyp8Y;y`L${b}dIsI?XyhytAAlX@3yYwbyJ{ltkIJ;>&rf&?x_zXzEk9cNCNc~GjPta1rqHkvm`oHO;|KXwX)H}M_H}X>+WdK6^+foXuRP2)x<@}6ggaBZ?PJ7kX-4$dt}9c=M*-$Duz^pU3gjNVB}w~2vdWd4HP;7bsO~aj=b#nw(?~w5Uf3#F zzmKC1*Z(@v+z->qh5Zy4T>wcJ 
z@wvsI$Z{9v!S<;fVxHEO+GsCFC!R@^Msp4*%r9fe?Y7*Im>~3N_XaY z|Dg(AZC3}IQMbOq%*3XJG1QP8y_wCR9ZQ0>V5HV`Wc$OpcB!C;VIql5f6D^3+u7mO zC$4?DbaWStmuz=)$ zm>aAG4L;kYI4%eC<95z7u3E__u?lj+8TV(&zR3f+Se({fldganyr)1h6mVXh~b&zJ=GR z3%a-w!dDEy5x<}?h?IvJoP|q<1w&eM=gl7i?e{8CXJ&`($*q{6PSB^TGVwkz$s!|b zlKF>#*_U;z&6O%(lOSi_8dYC9z~ma^J33Ket|p|;UNi+m$35=S4E(Kw`De87Y!W3u znPm$IlNRZ&RM_yd3wDwl12gVu^P?fj?WL{ZgE6BtsjcyKIdS*%trzF%v7ybu&1|Fz zY7SF&QFa%qB@KpOirD0neT5U;T;%()5YA!56lYzalg{?th!lZ6v zIZ37G_<9+y)28++7(ZqhfV1U|Kmp10 z)E9L&=50OnPtNFs-M`}@C_>dMLYckh5gy4wjkT zn-xKCfC|{zI?-6M@n@XdjWpIjEMFM=t77#|g7qP4s@Mvky%2D|tNN_Gq3v1z4nFsF zRwc)9^Z-eBVSNbuft)gTHbNkeb{!Tq&2i|}+sC?CPByALe(&vA_<; zn;`$h(0W0`^wh_|V6fFXXR>%2ftc6%* zAcF|c_-%ns*pSZ)6_d^3b&%V70Kh{?5Q;H|Y~ zWOy4gz`^tikJu~|Aj4-j>15;M5R|$3lmNNh@283aYRRuZ`#QDmE% zgdNJu2<3sz>3bxst&;{*8vuO$QK(6Fk)H9M!v#M1!7h@~$q$!V{_RN7?!->XEh7aJ z@h?QjA=O9lDxkpV@bKBYOmlH%Ht|URqNPp5)LBWz0`$_G!&Ow$o1)nc=) zcNv$J=%j3u2bG;M2NB=K*d9;ERmH3SQ#rtMQRux5@}!HUO$?iH;pTLa8X4A|?i7mpsUM_aXTvLyMq(Bt75$+ldxcV^3)x_2XL18f-Rk3{v4zSS-LU1-oTgth2@aFC6@*E8+z0-Q6m{qpbLndIqE%5ro&q7n|=6ni$2R^4_L{`ylv z0XXrz5H(dPF){FbkL27NS3DhWD-YIEEGta*$a`y&Osyc14 zFQCLc&ho?g8cZxIUnPTk! 
zV%RKqM8jKroVV_?tM%)vbZm@J`Yw@wEj-Z{L620*SI1t|BT4L3DO5Xeey!V*Kza6q zvoI~_bEEKiG?0vzC4JsP1vYoCVAo;>F>UT-OA>|T&X^)2dA=$d&WKi|zFoxv#+d%; z@O0fLIxf_V^?zt_!#52{2n46kV34<%9z|?CH~=Jys`ox;z^wH@QAIYKIa)I+$y_M- z95GaR9ui5B}(XYuj`avfQ$f$M8AoWD+G)`M1vjyIn zW7w^9KAsAp7jYKqWP)06S6>f4Z81u8>%+O4oG6$s>p^1!`FzdVUdy=1@wQIfmH~E9 zwl_}q<07EV9R`y7Q(SJl^&bk`b!>!yLU->u14@QW5H~Nc=c5aO<`5w9# zMB$Lk__#9!l^O$MPl%2iUlPV3-m2a;FZb7}YN~S-wZFJt_ zTh4iIszIH57&Fjf2eEuPFzekb^knDiJ9D&ECF@2jyRi*qv7#&7YDLd&E}2?$%>7{7 z{1Iy*?HIX%yZAUqGy%50Q`!^@X4XJc9&J~E+=|`CK$@g6_ve#Dp?mGY6y8Ii?fA2$ z6q?oDRPjV9%(6~Ubpp8N`ZDoLAz{%luGju1sOnvN3$!=R?0r7KdLvCYQK8hHeVIw9Vg8yO!vLb(E`@fyVUbuy*!5*cn2FzCAvxFq2+OLMC=hTV@p7iD<%brnDwKt->_ z!n>B`w&Om4Nkm%e8VDqSzY^X=?r@Z(eoLd(kL!m+)B0k>OtPtO!8I&|*=E!Xhpb$^J+U5G;rd$!&i{%wc=RL74C_;r zOVopl-xNuFlsJeDE551G(BI3K0rC9H=vfcAy`IF^T0Tv9ven)2_h0|lV=O+D-USAC zO@h)-e^kt8@u};yW-a>AS`7a#97H8@=kP=lgod&_HnhH^RVQRJ@x9{9aVx=pmq~n1K7Y5&5rI`>?=&L!WXv)gtQ@VfjQ z1x1>#Gyk{OqkQ4_!iNFh0UFLi!@;Ocpm1Zo7m8E+a+B55PA385|Grws2O_-<1X7$p z*DRFUM%4`ts>ec(J$qUo7kC^n8A%tPDi)%SLBdTrlCK$+ z($XV}x^zF1OTk(+1`-1*5kjFF7k49t z{|mw?5F_uqR~Dq;-1)CMVAD4kkEQf|hW}1X{xhAhFu+=z>s^5T=c&J~bMWL|Bv|T> zE+)#X|FThdz6bCmsvqg`KgQ^<>-H}L$IYJ;a*dzQ`)?OIccWd6!L$9ZqHC5nl4)fb zzIgw3Ymxo<+uz$S|7|FPM^ON}oGOs~(>KzV^W zZwJO;fr?oW4zPl@^&yY8WhOO)f%L9I_sch&F57Y8bQ)q}k&MhgKjDX<;!qija8O%Z#Q~4~yYbYd zmi@2FqcFXUIEYg_J(uMAplq@bt0`wv7lJheNIU%yVzF@r&ZGEHA1GiC07oA;#K-Xi z?rPQYyPgJBOgZSJ7YVpbvZFDKk2`8F*u*iiD+$Hpx$t~g?t07po@kyg0cnk)`)v{V z@3k|E;CugE6`%27ca<8&lIpoI)>~xU)CzO%HZoDmKdaySB zTCVQ}QS{s5M92Byz4!A3$$hR52PB~SyX^p^q{xyls7{rPeWRGC$pF|=p?37kQByS1 zSE2whey8=*>2F^W_vTA}8ff+YwYL1B2mowhDl+Ve00w*Q(>3Rd{YxF7$@exLsAK?E z4%_C57_#ow#QcoGQv35lkL%l#FaP$@^K9A*>*e>a5B7Rl7TY5jhD*8H z!?NVF<_kB2@POmi^<|m#w-=U-$Hf*Dav7J&)$#VN_;^pIEc}9qL+Z)g(v^VpmI(0d zFqt9lb)`DloGQ~YWM~E!2gB$fpQpMh2!zsKDevLDUW$yhglCHXXj#x#fVrwWthd>_-(9{=X@3$Y zM3$Y|9%Nn{*wrx0Oep3mGo8bCqQYpUL;(#CoVMd+p%zSZn$?*K32x0Qq@|ZTF|!aViBMt3R!u=$0~imp)nn!HW-J$GPaEM%CMlQ}<@I+T<*z{L9jnVPc_nBCFdn%UxB 
z99z*meAu%#&;tvv}@aZmII(>7;enIpJ|0}9P{17*3)+S{mncwq^&VWH zL7(jH#FgxPEAeOx&l)gjXlm=PS|MUPn zK;Say(Hf37*~QLSJ}(rCO<9XqeC{x_wxwWHwx8fSFT*moQ8weQ)Nn$agSu*ALI8`~ zy3;TknRktvZ(c=#M3V1aYQ7y3rMbD-$Go}rQu@!BE*iqWpX%5PG5mAE`uFC(WkUs!ScAWe*^8+52 zv*EY~`^ByrKxw?H`Suoz*tDDqex|$aEr}!oBdf`L78o-!{|*4|)V8Ue$BsSy*$viv z;0oiPd-Z%&SM%REeDsSLm~|W8?z@*KePH-CPV_OXTK6YCaO=`UM)=Nac~l1NKbQzF zo0B8q0-p&NQj5SEtP$2a&;GD{4kwyrC&`CK3$YYYer!#snTGEsJe$zf+4%G7J(a0{xEy=z4~@(?M#%i&cc;_puKN7wV}LR9 zUQ9JxIc&Z)B&EONphGj^$u7R9QD6}_3B)9Bnz_4r3YcVl13XL=NX8`k#-6}8ekL4O z7#X_&jFgqD_wkX4U$fei{+5F+hXsUbKB*3SHASe6=QCvfT2RX{vJP5$$nJ5ZY_CQL z1kS9lE}Tr|fZ}Ja6TA1pc)pvk(@9@OxbWc1rHV4CUbme$gl|F)jcit!f?t1*bf(%E z+2IK?C_}VGxj#IKAh2j9GBmJt*1%DA`_|Kb${pFxK|l%g&_P;^=Tfd5w>GykbhdcepL@90P=@l$|Ku}lEz7MVvgWtRz_ZMAu9 z70>XgDk1AWx2N!@sp;(zxkoRup-MNB*wgFh37=AMnaKI$oL1{OSnWN%v_V0;CCzM1 zcHEmrY2F?3S_V*t`-U8-N%1Ckoo149DM$ByuyphXI!NbMH56HAx+QOt>Uq4vMu6ONmp7I@6|7c<;fg1 zIa#9LZCf zC)ZG>6I4baQF@IZWGfzF045V@TPStWCrpR8y%AVeC2=DJl{(21wxZKHy6txBhoQpX zvUf{H8QJ3jU(}=AUx@*?K~9ubYoW4faLh> zX#KSEnF$V4YEtfV)2m1jZyh&HwN5q%>cxVeSZLuf9L(*cmNu_Q@qs%x`cs*&CmK2GnE7$SHRBxGhRd*Sf~ zxJb(o+v5x#4YN+cL$;AuzyiJmd2BP^g{P908B~@4`R)rUhsWm&xLUEJ>H2|03#~k) z!q6EznLH9Zm2_ISo=~Xa_D)TjccUI(@g;ECYGYwHbkg@WEpdR3*xhn7tNVZrY(#pe zShi4%y1zZ3Z5a8EIjvB?d9a>L;dNieYT+A4?T9AcP>DkbFBDGro)o40_d!I|9~?ob zYzv(-f87Er!CR!yPY7l1l8O=9VJAFxQJ8(-#8voNGgIOFRO*k~wL+snBu&1VFQlgU zd35XA(r6(~QM(E$&Or4Jp6c^sEZ7e>z;#w6vPy4x67%Me4H%LJF=y=h$gpntVl&P-UcRo_$fwyq=qb`) z&eh@>|G>kv+?TfB+Na0%-Ght?$e0RSKWxz#VhLk;!&#kSCk{P%caBLM+Tdd$lk$1{ zxo94pkU_?jjdaI%AE^D-jYv$`46ohSap=Qb;mh8HP2YtNtNNB&$nVG|6w}(bB3BLC zUsd^*J*@8jGDrA6sO{Tx=iWO-UwyY>& zYKBPDvGt=$vhgZ%uCmxAV1OR20yT!Y%8*iN66qShe(Bo#y4zxR*XzUlGuwsFTluaQ zcAgob1Cu9}@#00GUOk=Qga~!2e6c7)-OjniVF!zkWU;hZp|9<**sGlE4w$GAT=qf5 zJH&!*K8~Jg5R`bU9I1r?d%QiYB8e<+AI^5z3-P zx?%z~2qz+AROcN7JnhZ;>V<}ZW`Y++8bHl@_1odDj-_Rlh<(k+=#$eZN=W;g&79=d zkRGQbFT};8k;-iT?R)xd{RWE8#jlZ5EbgG<#B*(prQvKm^?&trdAY{tGG5b0If_*e 
zNy4_*iO1%Wtm%~_8rM%L7seMZXWA=Mf7~9}Jq*VLQ}tNZ0L|6NZxn*gLG%suGBhs9s_1Owz52r-h;^X}@ux=n=&H{b*yTPvCmX zxFNnL>rVW{{f5q&4Gzbjg{q~DTt8|=6Dfu5*@@t|uK=+~9J24(x`Ydpxbp&&4xz_y zH{!7YYFOvD$I53KT|{af(N>>JmP6_aKFnSbZr0OXmEXq-KCP^a1$+}>=44$brx62y z2WhUoQVHRiFxRYNQYn=9T0TCmcNqXUylIbq)LESFzi}Xt3TZfJfX`Fcc6eh>8f^i6 z;Mb={2l%F(!4GYXc|ERP2f4HXv4o#rbxk?1}DN z>Mo2i2H^qPGnt8}?4UQaANM(;E4p>p7QR-+zW$dsHRm5mP?1f*HHGfMX2gPkRQgu?ulCFI{Y%4EO%?J z6Es(=w1+dVg&@9(X`02gCwji_0%~$lC1_*`vjelG?}MR=ry~t8w|n|^b@iHi*PC0- z?VTLnr58gZDdwg^WlTn8CdA%qN)kaN6@NvKj~|YEWJDM1oT7j4ibM#76^>tKeFy}y z05d;FYyTcPA+cf~p}}^|Z8BA-*|g(6d+82Po&<{31`%DcW$lF22+<*wk}rN(Si;P< z9<)oqH>XL(G%8`tKp`}k%g#g$*t^)3s+C%Vb+c6K;;|ZSyPi6o3Dx_1hs=71(8W`( z3y8R01YC>C4aM7*;Vh;&$rv9mk7b1CLGpAzv2}Z82(# z+g|lFU#(WAHdBqEA+DlI!RCf9=Ql#AnSEY;l;0DlR{DJGV#ut>h0{lsAS}vlyj;eO zDZw2auFg$>cbQs{!FfY(5MOU{w2Gf;9x!yxPY%~OL@k;2Z(C+~Lyl1IqFVM;VJ`gl zb_uy{Zzr)Y`Qg?t2j?!&wOA*i4Bu>#%zv z>fYqDn}){w>SY!wF$%XYc&c=D1^bfwl2xcq^m`H{1GLDH2IJ9Q4SYYIDOfi$yu(8p zHa&uCce+_ruUfnp(VPoaN%>Yiv?AG2)t(#Fgqyr^e)r6H?s@M8d8jMf;oW-H(Y!8@ zjc0}a+{%G0Q)t?6z_VMp7*8JAVNHPKmsD?R_W6i&Hkl6?&DKe~8x(qB~@{y3~U@!cpb*G)VIWS@~tO$FX_+aI8f zt2c!?ORnW2`CC8sdx-e51bw2w{yKw(+b0w(W^yX@w`4|TTt*jUEA~O{8d@Ttu8B@M zsI^^_5izo?QRSevu|5csBg;_PtGhgQLQo09C9@^DG+ZQ5eamVvmJz|A5KNG8op`u0 zA^6rI1JN1c`Qz0{$D9^M#IQq^%XnSU%GkAC@YN0+|AiH0Fx%|qVvqGoH?>~Nm*C@j zu&wc{&MU0^vqN_|8mW`vg~{lxCmS6szU>QJWI(wHz+MdA_i8i13I5Pif)!_BM9VGP zYw^XNSRh0gsA%Calz*-`k}vDiGLnpl(sI^x!r+Lkv@;Q{5+)110aDt+nlq!d6B^=` zZZk8Gleg%<HGBk`oZgdy-nJ0&T2yKFTh^I1u^V147y%E-U^I z(vB1T&P5*jz=+#Mzb0-d zbieqxDObNKmm!u`HGb+chEeh!;j1v#fn0_(+8tZUNPJzBpOG2>BLw*kl~H%xkU(Ei zHd`3&@deL)jY?U%+lZu`Fh*HdX7zINdzhq=f_|9dzK3hxh9s?fCtOG?&Mq3D$1Sg_ z&Y&ywwUd+6zSTAXQOCOlK-mfd{UPlJN0*F#`9;fO9~P-zk`u>iy=u`!8t_+S z;ziyZtq^uGZj_sF-Q|&DQt2>h{>EpwpLsQh(Zk}pdlxh{@--h!Kvp{0vUQdvoh%gC zjVDVlAvOsUCW#x{CaN>z%hwr#40M0q0dOC@s6T}4cfNC~-zJi9_SZORikL>Ze(K zziYj36f*CfuL0ByhRHk(D$Ow$9Vg*7Y6KPK5(=#}d% zBeCpZmlkoG<0Zl<2359S3IMnnWoVKrUnLm}|Li8yvts>FrH?`814Ba%jNI9mNUujl 
z!4aeKXO>O+0^9Gr*X}0obw-=r`wLnR=7&%RAC)pXFAAdvYv(#7It*%~)Mm-t?`Q|1 z&cO4V1JaIPl+Kzgbr*BoU$hD;otrn>ZCgqD+Z(ykeAxad?cn&z*YzSs`1wkg7S+i~ z;WquQJGs?{>sHUvpVOKDPJkc()IkuUM5(-XS_qUSAH9%oaT?B+D$w=GbL#%9#D(w%oN2Vs=cZdJlmWQMPPgN89TA=VY{wE3TF3g$=ArjmN} zpVIFMWgVf`6|&WDgVDb}`_Bh|QV9|`6#q~~|FxR`>;F13kkbGE?;!I3$T$SE2@HRW z_)9lJBoyJsJ_vy5_-(hw^e3+v3^jNMIucfmiOyo2QnE&vKQ-vnOTh zO}UjOU*5s0Lb`wY3T*7}6y`g}imX?GG@u91Ekw|cs{JxXqicwz9SME z4ontX-ST3o4;uqSF3T>f%pkPNfTabA}l{ zTjdr#jWA-SmcYc|VWwEItkBnuyuRh#U_X%(OuN z6h2OyG(#4cLttM`JdJ$N!jaB(S)u!NbG%3zb-9N!fjvEbp}!-5F9Zv6 z&oHsN=ZY|OWA+ZZKW^ZM$HB^pOV!6MJIj)D4DN|!!-0Qps&ssQ} zR%SOLHCF7DlPVeZ1{k2_%pc*F_TOW-0WJAch*me=L%_ug0N-JNE1+ITx7TfPQ=VJO zMjJASfSJXFhVowK!MmK@2l&*M{MXOENn3Tqwm7J>H=U1H+W7%{aiKzbONzxPg$Sas zxoWR9Pj5eJLEppmi$%GXcZIZDuhO$myPVargB?9Ud3`faJsCGE~8rJNb%QnPeA^LH&| zH)rRNt7@#BMmWBhCry%Vjx0&8KN2jP!}8Dxcz9amVN8Zwa{h$sk z;IZG~ldgGXB^nd3mbAScOO`#UpXakPpCd(TEGg7qOLT@Q_q{r!)?|5kmBeERp>@T={wB7CUi`2G;O#L<(dZkKtM$;0g)u_|BVAW36YxZ6njZsN~ zj*&?@-0u7hJV=Z-OP$-M1T~~nTcF7=qlZX2l2KFsQ`UJ93wzI9t)3|U?IGAcMmo>L zL7kQNi0Bb;hUDL^gDH{Cggl1f55uCf)H)nuzC&m`7Ys zSLjDw_}bBy$5uB;M{-4ZyFo>`-k3hXQ)rX$IBaC11a z8P}U#6(pe=;>^)B2#o~pJgAD5w(H_{&TR=>9G3=Epg|QS=`8n;<~u5tJ6d0fb9}IA zKY*=O;xW z_JaO9cc zX{`}5b~klB8VPgRwlK-_T|GX^v~oUOV(uwN)k4+{ini>E9dy8h?H(;wv4-jQg)S+= zq92|&${SC9FD$j?+b#Gs~Q)whbP`ac$C6#U#4bt5uof6X0-QA0B zB&EB%djX4jr+c5hzwg=Sy#K)W7cVca%e7?AF~=NZJkRI8VfNmbrv+ar)h@pM#W6p> z6*8(;#+B&DBX!y@eO{XW2;SqOeyJ|(G=L6?B-0>E~t4kPf7xW~fe(PD^#?MY}<=O9J zHl7jZiilYcmP>856U^|1*w+$He*%WTk%bZWTxG{&+D#hktC4239Zj+!XZ<0LDgDzY z)_QZjaY?;fZdAQpba>7mP$Jr@x-ia}*xb*?L-FR9H*-C}q3^<>db)ta>9QuC-o{UA zkB#*UBk>yJx$6%SHKjk!I0lKSE7io*cukC|Ly_{lripaj2 zKkr)(1>H^LM&cr$%hIBgiP(1BR_hm?2wVG$U5kPs9$8U9f8oUtCUcMEwWxp$oh@$b zdgr<2aR|9@u|QB2`dP`4%o`R_!r5%}vQpypZ$S}0+NKF1Pn1!euVE0eeld4EgRtK$ zk1Ax|BKcNK;i+a|?#xskN@jHmBNz>MwhK4!7|fom`AR`MUcvy$P!Ald-$=x1E6^;$ zM_m=9=J?3>kOYGYu$#gxQQJD5C-%H5Bv_X%EM`ldvgQ9?__) zp;&GxcT}CP9~w`~--l_YTe704cxF4>qmA{j;dnL5l{z!~DgL3yaLF0vOVikS7r(!W 
zFSL(Ku<~`O{u27)M5BxoN3Oc58al;kG9=3J2hKihz1<;!aw>Xad0RW+@|@c1Tz+jQ z#J<-j9~syQM@f#3?M8a5-?LTeoVs0@xi?=scbiU`>17fI;wESU$*nf|VvszSL-^VG zf*tt>;pRYRjYyg~1gE3eKPOi_BA8(JtS{>{3%D(z3B+|>{CaijnY^4m2=*r)?KgfQ zdlmJ$zlgZMB@3#xE^lRg8|wDS^u0+JCc5LS#08;EC^>*twIKHX*N7yp95Bu1ibQJ#kCZdXcaq zXtNJhbCagMWgUsmt-M*YYWz~663%SFZ}p+ao0Ip#ZCI`3jl26FT!~%+p*Ujzj$Xawe1P*9CqlsQcA}jhvRls z6bZR~&sV^DP2b`sMtVaw}%LV>bp&xtk}l*{MG~%J(F1=`8Ki_3YKhi!uG0MhOL!=q_p-Y zp~(la)&N%&@U6g9b~+?){|XY~(J?^YvS#c0C0*f3EXnVzUw$^iZ9<&1D%Cce7zdYG z9^2rYo<8bjr4i(M0!e%);c#%XOAoeu&(?k6oz+{F-tscVtkKSo=9JywJN&F;!q_&o z$02*%LvY+B7GkMrKHa!w(#f|{?zAzsZi37lVB5MV{U;waIH^B+diWDa{{lCeS&wPQ zJ5J9w;C9|@t`GTm?&$|XS&{-N7i6iRHR+KkFn^CvptvP(ajJW1w6(1L?r};<$$2y= zI|B`$G0y&V!66gF7L8mSU^HE7x74^Q;sdR36dQu!L`J$~--AoDH-yA{uXYfCEW4nK7*liTS=wfpFby6w+zC?2)@AHkixwcc{1 zu0eMzOr@T^D|t0mc=5fP4ziCN$6RJ>L?!DkI80#5kF2M$ESVKxXS9} zrcAvscpWYlq3c98;bX~ot-ud&vVkZUNY14XTSm@)P6+wiPWRcgATm%U_UAtdD2P~# z52sP=#dM;<|*tIndH*0E37<+Cp1YqS~TsuyKBW|ivY6%9TmCg43BoF7x zX|dDvg~pRh539b6^9e*ze)qY&|F}Z4S4=Lt+$vXDLiVYxrQ;il2ew-BxfQJs6n1(R$ZzuY|LL1ND zJy}k}>rA_EOQGlMnnOhUy4{2U+0o@k zMRapApM4Q`xf5lu6YZ$9r{8Z0>Fm~S++Q1LzkhYil@f%6)m-21I6N=k2f84a{lnS( zo)^~5-S-cT$Hpt1($cY;B!k|m>ePd%`Kr})vK)fRH>#kw`-yNp5s09%N1{z5k*qEs z-OMxnFwi^}GuZjGtf012Hg&~wuaR1DWo;eh&w=j=;{;k5r5ykLJe*c?=P(7>DbNyo zRli7i;6ttib6L;u_|>?8pQiVSr3ybCm*wprZvgjlGFZAoWnGqsgy~V!z{zd;X9qTT zt9O79q@&PSk~qj(Iik z)OxrWmL%^B5#1{+OOlDhUy(d#L+u%^#%ZgHe|!{M0X{7cj3Ic{ z)Mt=@iv?RPRaY!Lgz`bY>NccLHkW@j2Q--@!8{EEyYDz&H#Z|X21njvT@HtKYpAvodI^Dd#YvR(;XHa&;3>FGM93T z$*WN&t!?2(`zrQePvlkt1MNnM742)$9qS(Jl7P#a^99zD9r;2|FOSCFi@+(Rhk}%KZQjB^NzJ({KVpF?&eFvj!CKRL*;y9yVk2#AwRJ+U~ zmfBIO2`78qebJq|pd%Jq8{(F0gFjNsAAi(YJ|a9TakxOJcWUoP>gXl=+S}}hEk{H? zl)`2X=%X`PYhO8(e5bVzl*f>OaK4s>+qIKO(l5}7a;42%cPSLpW`~_=uEICJndmQ$ zWd6AKg}jY?)!?;b@%96a;B=Vt-(F?EJekf_fswH+;b$)o9Ok0_^^Jg6V3t;?=7qP7 zNkFz$cytz(oE3o8!PiwIwE>hlFdU@){Tqu@zgVqHyI=r;LyNv? 
z!%K^IVG5q!#pl!JmXt4W(UNYR;zR3AV>=9upv;+7)Z!FwUJs+xK$_ zj=sXp+LfLcGiu$`+Aqlbir*sm>AI&|H+cdn%fIKoeCEZ=$%+|}A0 z?Ul+y>A5DC>~)h*-5{-3I}xWI5h9@d@pU6MQc^{Vaey*WYo@c}TwA$bB59E!{8^D)ZJKZZ^hnWvX@CR@A8d!|F+EQ&Ve zllf1Fp0=WhQfD)HTD^81@$j)6u+*yOS4$3$K`qTz<6_))r#sJPFO1U^83S4U2Adw5 z8zo1TQ;S6f0jJ6LRg1OYzh~Mp0L^)9o9sQe^(vCW0OnUO$7rB z6DZ&`h0^kSeqU~CciZpgg05)n8H)_7S?#u%}<#&{&)`aFP*OSV&`yUQ8)FyIr_UuQj#a1 z>%=h47~W*oN_Wc?(5}ccpCq!*K?&oPkK*cGbh6+!oa3c;%f8G!>PS29(Z6Tb-1cL^ z*U5n_m&2hFD!9psEW$E=sSm6(b^SFtt$EYTa-Ya>sk@?r@F7E2j?;mo_U* zhs5YF^#Fdv z9FXMKBBoE(<|)&%XG`9o1lFyB{dvicvaZ7ldq@|Pe{|tia8JmbHfBE*kQa>%)3hUP z1YEg5RpJZvJ!TkxCuNr!e>81n?(9kuQ3(8i1c)pNx9F`|jxJzNhS8DYQ55?}w|*a! ztS!bDt8-5oagOtnc6505{ytjGeI9z$Mz9?C==%juuQdyiE~`)NDeWdadYEtluj@p9 zPSEr1sKE58qU9RdA;0*+X=Mimp}XV7q|i+pOoj>e>bV~>q=vZpk6R!a_*6CM%3@PK zSj&atVWB1Q!LoD^>-H0}9$*R+@Ey51RZC1&uhItF%?2%IY+N9~nJrU!6G=+sJfLIp4ap>+``yL|*|24-Xn z#-qO$pd7@yj@WSwoTH#sn4LC#tSBpH*N)4q?{L87xC+deCVhvR&&zW_itZ zbi!gO3>gYJ`m?Bq^UKnpqIp-OXAO!X(G2 zwVs<6Mbt0$=QIIE(DkTn>V;ITZR0>hZ6z0T5X2ecu1DofISRV*Mhqs{1bWr;!lJEF zZ%_wnK4S435f;Hv;q@E)x#4$C4Fib1@QVI@r8IgoOaXm_%d>gfDmRyE#g61CW?iRvs^J)TK4sFFsvUrM=%A3`qrs#E#daU(L`m>R_C9c-IbqNbZG&|Yo zS9jSOEEgZetj8B>Bm^=$i8A6KO+O08c1e?WgbPqMG&A2_|D;C~iNxKWUaRUw% za&o`)E8uFsew>oxClRAfdJumtm24sB^aS@SmEEi&jo)6UWx3JHUA-(ER3K{SXtR*h z3?x5Fp#IMCKTDZ#)hnsFQeRKMsW?JDjE-%#iBDJHdQmM^D-F5C=keT)1X@jWiMecw zV2e%L6jcqI|IXOhmB`zTebIiv>1Q!k;ZNi@=sm-8!hpu9gKcS)v5CP)gd>9t(5xut zA|y4OjZ9Ee$()fE&lP*M9`?3S1v*F>C{g_( zkDTT_Tq1XJ<(tN1PCPHEa{W-_Nu0uK=p^n-hUI1+tO1Yzg`FxHZzOYWQ6*17gwd;9 z<35T1aa{oP7)u}-gVu*iK$qp(bCJDj)3JQETt6^GT9 z1$y ztaas3XLb~W;G$0688N<4cf4dvuJV#xJW`@VPN%*CIfE#u2|rzOZf%V3^8!P3y{}Rs z3A<^UTnaSK>taXULj22GhYH^nvf2H^*}y?f2w0Cdl!Fo3;1KQe8Mh@$Ay-+M7I2H z9)!^>ab4h>mtI0`%!766D$^5>+6a!m2&m+Z-vW;YijFCIF3Cbc)FK$8MTK==RiZ*K zNoX{A%i}3m*5+0a&Y8reB`OWLZ6w1*h+TGzXTxgT>U~Y52hcthjgMAwy3q@k;HkcN zUzVj2sY4sxdV4I;L*s)~9fZwaAS2UkB(8Clh+)g;+(%8NIoCg-9*6m49B5VWoxSy? 
zkNwqkrQew~el75{;wN^JD*B6Afr3twptq5TeXnJg)%5Yrj{Rfti0Uo9Q4p#)%Huv_ zqj9n8gBZ5&6$8sda`8n&_vP7;*qluglWroPjAwteEV3xpgUxZYze75w zGnCR2uMV%2MuVVI8i%B^zYRrHpX!L_B*2dG!{WUQmyvQ`RW{Y+iKgL>{NQwQ# znY~6c$(?H%6ev0XzR#ymG6u)0U=`XMdTG$jQ3$k82cZ>fhD?DfpFd|g_CyamU=-~^ z5UOu!?m4d8bz1V-O{bowPS4kf4m|*7B(pkWp}I~w z0Cv*JKH@ez)iJx-^dfp%dwKRuf4;*--D*{;R-h19b4)83_sxsY1aG@~g9wG?T)~0&w{u=h#g75iK2ldO`pNSdl5E@*g}z%$o%l{=CAd3 z2XSUpaXVTc*duwvF7-nbYSmCaxcRIP3$_xp6LWmA4PenIHq;a#Cwci}0-J*&Nf$T( z560RE!ZySbFFNagDvAjDV}hllQ=||-Hzkz%l7O=oIR)ZD4n*tsYq!E6=aEFRLUe3p z*zodYvx;ZJva*fzn~Y2j4Q7f*on_k0%`+wL^HzMPh55%0~5`NvlF+c8)|Cfm$_aO|t^4Eu8pce>uswDjm^gOn=6nJgmKiZw;@yF_{ECPkr! zEIi#c{QE9!s;+R{a%Hs|Eak&*h0N2Xhb9@Cs zr(kKDtk3oN-P!JD{yi&*fY;~_cOC*7w$^COz?T@xo4-=qmUoCU?Tyk7fM=ovR9}OiL&1Hk42xH0Td5997Bu6| zU`j;h-1LP)OMhF{QP;5LY0)YNo~?z&65{%0BE4Mvq8p4+u9o3)H$PtMz1m#4ug!h71}L1*q|p4V~+n6 z?QUIApMb=^Qk2(d{&{hDTzb4w@Xt8y<15NT7kwelDbrCw)9_DNrB^&2Bk$W#{aye& znTYRmcfRSw*{5kpBsncbjYxd2RPWvV(knx~jn<84>>}!TGagZ@HB!0|pKQ}?gmfnt|6jTFF z6YJ*G!?D}iebP9UUk)*xA4G2Qc?rjvLqXUhRm?s-FB1dUv(HhFKqSvUE6LtGRaK7J zh*5hvUxJwd$??(D_sidYsC7wP7E$F(;=XTsObb|OWwysHYw)C*=>4=#vOHhOEjs5iLR5+Mp>>xy`!!?R2-=iy=|a;vN|eu2 zjgSN_Y`J+jdn(TLScwa45Z!@2QaGp_e_4;wwi0J3@8qDjY)D)naQrN2d%M*5@C&P6 zKIO{a>*T6^$iAXdy9K;l$^UdRU|<-}Glkd^Y2=|;PwS@MZI_;GiExttL}iU|0o3R! 
zN-#F~N!a&@bQ8gCvkPTv1DmVC5M@kO=SUBT7V*Z188sE)ARNp`4>PJKP z1M%gjrL+gc7OPXLde!zOtj^;cb?TiTvxR%ZWSgXue;EyHb8?M5Wj)oDSnyZ28sm+h ziN*PJ;2=hwSUlK=+Km?eW&Sk^OIL7*0|8K(Z|nos+*^VxTcb=O9Z1x9W<2dZ!jEiw zoprL`2VCKn9nq7rz68r|0@b*rvTM4Br&U4Q$yCm_$F&p3U@BjEkIGTl z@DIR6aJt1O4d_TpvuRtMB9j|cAy@($LQ0lCB4U4Uqy+g-K*V$XD~~%+1qV4SKSNM#_ltCr=$Ua<@nbLUaT3jc+ zRU_?_!82}MKkn>WOnw#V{`}r_md5{}!TRNpS#Uowgt?7tvBqq?fN8Og_q!(>M|Upq z9_v2chAdSeJ(W*agynK?rdPOLEPk7UL2DDmYNB!$DpddR_`EMkZ#X}Kz+|hS2&XUR zF?C|CI29D1r3I9{iq)A930Q4DTz96%HSfH{`V zWTD={5H{mMiu5@Zyg$CTrvhaBl2OFg7fn}V#7fzsDy{f!V(2f=!2W zs0lokX@y3$3>KlnUx5*T{QZnxZ@tt|?1p~?eSzGjJI;;61)$-%`;2QFj z`18YjXK04*xmRtW)x9=PCPl`(>v!>@xHF)al9?6jZvUEfg_N)__%PS(U$OQ8Ihh)9 zEXm#l1ZL%nx|$u#@y|nD>#rSm$C3oX7k(=j#+ymQIQyK%Xw_Sk5IpCyxg5KtOVpam z^9NeiiBWk7T@H!~p3%Ohk;6q-^5QBp< zeou=nm;y- zZJ{j%_BVX4+Xe#f8?^jZg`tbauODwZ3B0^SS<@VyI8b}B=k^-&eLq&V7Pf*aF1)nb zT++u1R4(568Sk+JkfLJPia3GSZpcKH?;oJTyIn9DII(pKVf;@1_NEOil6VnxQ6+Wcg`K*5kEDb*!E-}foS_a`AfhhW>0%; z6%d}g_mci0{yb=OxlIeY;b8tf?^7Ah{5tYHC~2W9KuFC`$JNjkE437~7=rmE2XG-i zTgsta)9PgOK{XP8K`h zsW6CZUH1a*V6r=HT*#Uo_b@f?v}$RtvBBQNFwE^S#(Csu)9ve|34Uo=q_PxwFozYlD%i*D(G%7{8n4Hk2Yov`v5dz^$MryV{K|#$Nv857vRnN>%C1mY>+Lo~)i}R}yrz*vfC7%8 zgmF*Q#U9EXMx`%(caE>yzhv;!p06%tZ+1&8ap0A?27%JDdKbphGj6aQs$u5Vn(XNy zA(!2KH;pzch$)aMr?#(#IPKS6=j13&*uwnGujdLIiM`gtTyE*V4zw;B`KE^#)+!*V zhU^idv#S5Y82QiKYD^4@+Y5R|Omx56t4{-DU?g;>L}lB$+#@@HtPaq0CRQMr$3pAuEkx{Q zny@gC00QDx)yR!}AsR9S>z?UUZ`c%F`Fxjq1VAE9TJ$bE!Z zc=8qu#U)n{WK2bE0!A$ca=Pq^)l)k;YTrP2P;v2M^o2~|{KCe8S;7ZZr&jJbu7aWg%Sz`dXNi@n$|r70 z@b$?<#b+IqTHmHU-zO*rZ)gWm{hK|QNWfoYT%Re?!HDDbh+wHlS1Db%uUc+_H#%7- zUv~@evJg`1Aq6MJ6$HS~ccPZcuh8rrdk1a($M}-pc9O5;pIBpq{1WI?RX`q!E9VV1 z3k6FH*#Su2rUI_3{Ib`my?lSnB$tI@Q378XqEcd#9{(V!nA38Qye| zY3-ri%tjr5zT>_&M{@Ts6)8~X!o~IzKe}+%yg>+A@sQnKi4ha%Xj~;$O)|D)MV=Zf zJ~hRz!}gjyy-(07wm=R=6+zDcNd?r!);ZEWOIc}GW~?~jYz=%KxI0`_Mc81_5U#|! 
zCWnS}+|2SNzJfbT*~{>hbf$`MAqh6a6(=SHZJ*?PmfOjqA4Cd3JgTslLaN>bf{S%v zxy^j9yYos$5z9XzV#e#8uDZ3KRz7c-sMa681(|0Y+{FWjUsz`SEw{(9pUJ&7GCXS9 zPNTx^yr?9-sMdVABtN8co7dpStdh^KT_*G71!fs!`kIH40bDob4DW{A1dax=PwbHC zxM(zf54NH%JpFZ8n_APS5&?I$(mV5hPb|atf)GMI>&4|Q-V&-YM*gTs4HU(~&(U0} z48b4guF>cq{U0*Xr&d2>udnM)w=EqNXpv2|m7q}=hSh(YSU!X{dOu*gUyWwa&GrjE z0lF6+kCp{i+`Tm>Qs*;BFfYVIt#Ll?L#C>Zo&swEWFO)lf}7y57iH5}KxQ*#GMspn z>~FfnXWNkvCSR>Eq!M?|6eZ{-(DPD73+~9ZEao+njQRA?`zc+(GYSF~BM$v$dvid& zuY}rYJs*x&8c(2Yd$hctfb)#>93TXpr^{8$MV!1iD12|!kFJ&6b5C9j#(gPj-PH$h zIXWw79^l!{KwQ1*HMDV8r2!m`B!jugrk{Vu1Jv2F=uVwz=KS1uFj!u@oMMI>-YMD| z=!~YYa}i~ETzA#!dq$~`_$JCNIUD=0FdBsuY?vCbGvg>>-zabF|FAK9or$G)*G(Ie zb<>k~;PK7C%iUWT1+>3|}Ke@H(AOi2mXBjKJ+A>xhKk3p;aU z6xy*gZsy9{cpaCRFiC%K=!#dt?0oCVmAv8jdtd4<>GXl`j*u^5oR&+~op!F)E)Q!acd zPBy2nw+p(=@PrTzzXs?;ucL)_sVne+(GOA^xWA-VG+Y2^eT`>1Tj?oA$TyGUsU5<5 zDC7Nr>cC?=Q_$Okdb=&Vt-+>JM-z>m48>4G__%)TaDGCfAsv^~lD2EkMpFfQ%Piv{ zi=Q$#Fsod<5rNvvyVB5z$n&XQy|q`%jURHfG+<~pDd`Yi(e=%!ikPGHh%*d=CW0QU zf6CntWK0MDLLoBg)dp$s*sH`Z3_V(|$i?>z`@%II_!=Ek;Bl$^fJSW;!6qAQVWLfE z8fsXirFy7j?Ez%*iVKX&kooT$OZHl-u6vFHPn^W`4Aw1oJOBr1Ojc$#jh$$ZM|S=) zaK@IQ$1VAKik?%PdE54^J$UEN>h2P-043afDDZgqO)#($pcm}NDzg{{39-X$#-64` zwTGno9#?;^KM!)+|G7is1-SS=jRCz-dt-cOy}dm^6pV>l_Dy3TGZz*iIx$Q&V$NK~ z%8UCkmI2PYDCNQG4>fv8*i@p`z&=u|#o6r;1#4p1N?2F$(e68$IjVIRs6sk`mcMPe zOeufAA^{oGwoxkWC3SN3{lIq9ZWEv>rB%rK-iG{^pu$snd>-KZcrgGaVF^H3_2c=Fq`xZpYOjv7)2dtW4w=f?{|^)PYlW58TXB!IK7udW{~kjEtebS zszx2j*HU=EB)%GOn>U9xt#JVJd`jt;FnBaiVayFD> zWB4>Z>gg;@Q7*PK-`C~N5?o(Up}h!@J~c}--D+|<$fqLRd0Y!btQtrRXq9?b8DEh(b(AEVchHWL znz;^2k(O>`3dv2tc9V${>mgnYkILQ8;~(Dp>Nr=6^D`k^vN$#OAtBn*MH*^;cK#d` zqFd#%Go&i=_{j-LoJ;$)u+Y3$+ClVNKGSkKU@&oe-n~x~bnMTN$Vngg=1=bZY5L}l;!}3Z-e#Zd4aAtzLzMWr zu_)Pvt6T2AEB=K41-VGju$O?%Wf?w&^W+1TJt`>l&g*tX!~J%p{il%cI!GF*w*7oQ zYqRYWcAUilSv5QCO^fVTGbTIQ0g*KN7Hylbd(e7hq1g$cUy8zAX0?x3s4zk@!aq7S z{*BNU_NVxJ(*Ztcv4%Xs_OWv{tzg;@cC%b@tW-GB!I?|kHk#-6gcL#VL*4$v5EqRe%&+H4P!V?mElU zdcZC0A|6cR>lVuWCS9OE;+D7GQX1g%0LBeck2k_BZriZ~J(uw8kGIbRJB)a9I`a2` 
zF~`l(KxK4bhT_%vPqDcr)gfieE`JGV%sS)TY4rz_p=}g0`({Jl)oLT(bL_U$md1LO z5*5Yh@LvGE=WQpkF4>vomA>3Ulg)4p<>9X`$=;Fh;?&|5`Z12$S86#KHi^ph`Q8t= z(qp7NJ8ieC4V#YHrMU;Ep-(x9xdp{a)Qg`pNZwWa2jEu$9`{6B7;v$ua&=7cM!oKh zy30LkIo>6Pf66Lf%3;US8{Y2^VzRgcJj*>CdU_8uyElX^|;c`6!yXAZf6)Fa0EU{Rw)+pZR;En#~)^yh)1pXAcv`2ASid*Y|~xG^?eK zrE+f)4vTN2OJ*#SnK*`(3;r)lQ_1)H^KPc{({++REM~){;|esc#!@D7JCer6JBozb zTSL+w;A==v??dix$AwaN2FMGG6hE{DRG6t7egPmF)g?gm&BJna4IO;i9T~Hm=yP-U z&2Mszr|L{E$JpM|zJEk>c0Y4==!iYEl*@6~YR|%wXw|ph>&J1k!;D2-uVuI&Oyp>K zOXrA(yu1KQ5$unDy}p5>!QUM90S@pISI-tjQ0+Y2b^v-KcQhTYgEKYZ4F$cSb-1eW zRqwJ5xN_8k(GT9!ObnBj={|M~#$tD{(c+BGee^9@zWW3y7GhM>fu)7kl_?@jClUs9 zkO7gST)QwT;W?L*d>F8~mzl5!Ok8?g?=!-!y(p0R%l&J7KW)Dz;U#TMDRNLipgT%A^dg z`%?4SInL=dCS}tP(wiS0RyvJTHtKOSKdw1Lli(1&)>Uu+Mq~ec6?9x#D+^3YWKE+g zylZqiIAb#o^-SYFRxfn9z9)zNF5KLG>vFM)a4>hKJ9<})IX=kTG4e^`{{~h6qeT=z zl`K|7ar+Xnnf}}wh#J(IxhVl|D%W*upr)v!v_jYy0F3h1_e^B84x}-Ho6=3jmhvXSMuhW(Bv~r0Jwesqaz^1IhQg znpDU~2f*u6)myF|EUJ#kkcO}W7?nC1&sy!T&9`K|NhMh=IU;s|D(J&P^^~`hK&{c~ za&vF*AJMYX+XTDN3Oj0w+={<1YvIXvF-1X@ zC8{MQNws>{XbKW|^E`3k56)D_MVH)S>SS)7x|po(bKb68Q{6MwTOSZEz$3`9ex%Vj z9j6_)Ouv&U+m@F~ioLjO94J~m@-B*K2ZF4Wy(m>)}FBMJ;s~V|mQ+`fPnQT)` z;9NNM`(@~fxI_eg0&tczj6O4pL3QRWU>FZ|OZ>(4mF(>nntwJ3(;vJ6b zxI;SBQc!sbwU+@PZsps~C(w(*Lh_B2*y-CIdpT8OR4J=&asDOsjJEe!C>^KeaeJ5@ zq&#i@{1^n?QtJRHWMp>wp2urd25>1)M>--&N>}!~+7&vQCnzB|?PGq9v42lid zr5t)No&7F(ze~7#Qlk}^s`qb~ZxDuR|A14S9Kf2Dc{js;)09)58qd&?>!GH(W3MyCLU4{5JW? 
zW?4b-ew*sIi#BKNkn>{0Xd7U-O*IsSY zNreT45L)W_TokJ`Z@E-@vt$hx#rEVU6aQwXDb3<14Dr2B0Zf^AaT9r?fU4>Gcmg=1 zzBtRCsgpK3C`K&rEXuZ-EmDLVWl6JKX4L!7Wr-9a{Lf^ zyZN|c^DBk>y==gdO(wCTlDx{?=#ruQ{DpdSazkzuGF{X-W8*Rd0_wIpzfF`m!JIX) z@>2yxnZ381We~g_@ILs{CFXE9h~rmC9(blOQ6nUr@&A z$9}ebP1tHsc&Oz8L=G9>`OD@>2&es3gC5#6B5MJgdS3bC#llL_sGW9Y@M- zWvB2!{Zd+)qN6d?;_&xB@e%)8H@|m+<#RcmSZlmUU-Dv+0c+JH&O~zf9&Rf>Vfukc z?$*804${f5-jbQ?n=T024o#tuz8oLCP2Jv0xk@WF)M+m@_qODVPZqa!3j^G}X;$;+ zNfc@dyK*VlXeK(OpZqzPq1M$*heI4`cY(A(YU^;OBxPNKnG}iN;5>{m!1d>f7;VFl zNP83QUsyk91TvrPG|mP19AyAIepOH3W7%l}0V|WlWFzeby@Z5KFqV@)9+iNpOel72 zX`5j1p4Hr29;X!3@RJ;xN;__U6n?&tA`v5T6lf%<%{eKc=|A!talpG${JCd2t6rRL(?vxdv z*@qvVy2ruQsx9FahX^H`6gKuAnuld28?=k?uM$1jRdP36N4*cV6&&06EwM}$k+Retwv zKt>?5SZBV2KeKZN=oWcR=YY2Qo#kf78p~U%D>*(bG`S8x; zeeK_Q;7PODA;H!niV?fDg0736(!;$AuA=Vpluoar4*!lcq+jA6*x&zg)1<24f7YFw zjA>2e^ucEWWXM58ymOzWhH?k!wRyA0O*m9X^>@WV-rV*35;UsaO6q|5Ao+_9Z6iR| z{vOc7{gm{cERbhxcG!;bN4S!8!3zlaf9P_;VqXO@FKNmHYgv^t4G~|fQl7uM7{;Rv zp?|Re{tL|Y--hkq?ElyA&ZY_f`(*((37yOUV9N)G73P0`cw&S_|JuGNGdN~b{`>CY zpL>k|AAN#fypJ-=zb}JuCwx>UFS(r-&41I*`{|}~ zNTZ>_y%Bo-UIqOP_A%2Nba5GBVXz2TTNK=KlxoIQ3&?o(RvpI#ZXL$k#)r?UY606* z?aL2O2+vt-9Fs|wDz1~ehZpt!)Of&uN?QLp>iq6xzmw@*+%8`Z{&P&*z`;Djx{_o$ zCjGZN4(PlE+P%ITaQ?sUr`umg%<5ir+yDMOFu3O;-@B8rSVsO$@%oQ_II=BlCEmYVJIoVEnG! zUc&0@JBt+LoaDh966tM^oL=4&Emllp5ZFocoX6!j@Uv}==_muj)m~CfzN{2beHUo3 z5BlFnZ+FUHjTx{>qpUR>S6kKnT<)06<7i5&Qk1nloT?5qcoYldM5<5sMGzqT;{nfT zdobP`OD{P^k^Z^=>wT={Avu3%{QF&PzFNp@bzA`UsP4aJuL;!u;~7DcvB3xmH9en{Z zZl?^->ru-+#*FDdD;KNjDAKHi|G(@gD^jGbNRZ!-J1tc5vG1wF%|>V~7kL$1bhLXx zd!n$()P&hbz@EqWo38>u_}=?Fgs;O>$`dO3lpd5w`Ga%ny@hwlatCA}xMFi?V*YefMyemI0NCk@aQd(yaM#*$OV8XVc3G+B@mCD?^=4iDL zGZz@)5yLdd{NG;2h|fqb@m9?CYRhKFkTXCP* zy;w0tct#v`g&g|)09|8{D4!OD;dq1Y!g8{{PO+baet1ZF80GyuOV6lRY;R^tKE zB8^30;d=Kv@cEL0&|yjcIpNB8R*UIMQQ%2M4#=GW))vhURy*S#e;-<(X6j{!(s-!3 zz!8!on

    FN!?vpuNe)7W4tmGAnB0!WRKACq_3SFi{8OJJq!B zJV|e2wYY4%X4QW$={Fw~Q6DLP1ikO(8C9H2Z)e?=;ifBTx7ZZ9D8pfWx?6j$Y3Vg zN-c}ikpf$5yVAnB&=Uj+z!z^nW}4n)qA0aqqq=e*|FE!obGZ8w%pcIMFp>p4<@`L3 z0tM%bbcz7DKFYLyqwDjp_h3bolvg_fMwb_aR%_Smw!$bjI+Z;#^~?E?Fq%#2+3e;5 z-%)|-C zZgo~G8LHMD#y3l+j39A_^O=)N=j0VA;p-08LK0Y;u5st^L4OfrzdN}*+!?&sc)LBk z$7NfjAB;m^Ju1^70P{;@rs<|-&cmvE+z`KZ>KItjiiDhPo@^ADIxGNNrbV}zCRfLh zJ)h9OEJy!<3%=lbInlfQrjrdN2OeSOYd1`q^RQN#O^x5kWw|$dBip6Xsk8J(@)g)w zEJ=-KAL;L*!Z=C&7OF_vIVnX7u* za|)D=dBN;ma+f@LPyD8)%CQT4@(Dj>7t!Rzm8sAl#=^jz>`rVsH77tZcgm&X5DZ5< ztWP#CM-+>-X1)Y@b^o8cOvEF^4zrbhC)d{ZTMkLvPa2!OnVtCV*j>DFidhULs4uE4 z>Cr1QSSVbw<>OSQLfOS$XT81^Xan8jnyP+F@1jQy;#)?Kt3ipcj#gjNa-~zO)l8Q(zUR~BL2zGTI2HWYH2S;L1gh01s0LAM+F1(e-ZanDt=U?Iw873CsLBSre6KfcRrk#=TqRFI$8;i}tU#q`#GDk0 z4|C^vd#s>+I4nTj2LgKeoKSA8YdbULQZy6+-LJ&G)`;}r`6_8EvC}fYHBTHs1EK(K z+grUqXS6$66h-%ur8k8qZpT2jH47%5j7_cGp4a^vMX2=D2LWw+t>c)(bXw5nS09LR zlZ)FoZBZ{bjJ;7px8p#DI|TjH@AR(@f#S^S{YiRqfKmD3$c zcsQ{HVsX?57Yn=>5y;(y6t0ij&G>ON%f+Gpoo7f{-?YE2ToV6#gvuKB z{4Xiusl3i^LG(~=i4vVAl>v|0YV&=22C|TI*P@p0FEBZw%m&@ke3bf)MQU&q9zq{> zCY$`H>a+_*+E*w%a0Xbo?01$l0@~K$EZS7_oYJ`MrQlm0ytv*pY2|2Da>H;)|X{Zd}QiZ#jxEe<^owQ0TkVmxjjdFelTwMy%o~0 zdOi>_U4>?A6^}8ohdUMX{t_@f>l-fUUJXRPz3Hi(&RQ358cr&MXk+ZUour9kr6f*>sr#+eQVE6HaUJYsjMJTS3Z= zkuSV%eX;aw6~(sg_jfnhj%gmmZAu4Pb!XDNylx>vd#%APq<*;f&3B7OA9&i#^2)LO z&37lwPsYcF`^YvH*86i_m31@7{6H@HFp{BSjS>?YA`XC{!txQxPt&FE5Qh8}V0~ zg&C34peux}fLPi|iA$7GD+%iK@hEX;VUtZrix&bN=}yzgPz7}e+$Jw5l2nd$OEjH) z5&?+`zJx#8(ImQyYEHxBbRP{^0bRd;kEE>DX2sWd4sBq8q`2k^DY(ck@W_o6XiV@F zUcb+gi#}{7{8dQs^IC#Z!X-#2Bca_DxmwnFpy_I~i+rMgn<21p>bHGU2$me(c zI3~i!OIyD8CJ)wnm_7iQoGM<~@`b*%U* z$<}=LeCm4Zb_oP2rFkC5$O$~Sq5bX$jzW}JqUe- zD1LE{4WBV)JdWA~47dDU@?P`_z1V@CMNdXZ2D|~ARTJe0)VpPajg`}4vuJc2Qn|D3 z1!+{rf*s{w->FHaoJTfRCQ2#V8FbF}PxCPm2sPT@$fq4pE!6W-yN)X`9p&eL3FURR zKw=Ba;ZQW!yMdgSvv%TLnTKj{JM_tWCH!)wmbbrHP6rhFgkbiLe@!4zE z0M&bwq|c|sb~7xez8iD15eV@9Q{f4C@`sy`)LAgkpDaAK+=lK^{{!L#^s8J9geRwZ)>PBM`(qpTU=iD+`Q$y*Z2LWYs 
zti%36*%0oDOe1@i9$0G?u*nAtsb8P%A6AJle)5lp7ITMFSJSbErO0_tI@sn+H zw|vW)ofl?rY;-!~Sljtlj-jmxK71mKUz-XZE!>_$cx(eUvb5_+15vbwz2i)8GVN0#KUHY<6_a# z(eoM|4^vNCf3^}Y@<2Tjzg-=T0w<>+_vqnDB=m3?dA^wm;g{vAJEI#yX7(2c4~3O{ zkC_pL7c6SBft0}Uv5t0w(vejznVk9(e^ceFSgKGAo0ROvDPX~`YXo)M5bY=I=~F^N zlt;_gy=kWV+DNvitR%FxQ0TC)p~CJxIcHuUR0Z$zVWP~y^@fTTwItWyd~^PFYJxZY z*KdZnmNNXYzwhM5>99b5;RFInqdXWV=oigB2b{c0zk2e#MrK%V4(n50Zad;yT! zE+1y6Ydy@zc+>nFi1(JWU(n}zySbQ+YX4_+1cZW4u&etD-I}>9mou9dRTzm_I(r{$ z(jKic=&`r9`=jkr!o*P2)|UY;g;crWfzY5VKHbz@W6knJg+aI6jWKPgo>>TL+ zrrC-m+I0qpS9F!PbXkQmVQ;oH zgF|^n(4WjzF>Hos#A4i&=kKKN4P(&YxKy7CdPKAGrdafoomWB_Z8qN=#0H-s$+O~p zQFTI?y_%hCM~P|)Comt@|ChTzsn>JFds+4{AL(SyK;2pjzZ1ir2!6RU9QVM|pMll^ z^oGVxyzbnqltj!e5&j7;go&1r@BF7kYiC9>yA3A`XIP{=BfNiNKjpDm511Ip4Cnip zO(8uk`oxv1!#Efj+C!lY0-dtUA)PYbdSd);x<^~TvE%U)h^qFJ??4{)K*VpOhU+^e z50c{DRD6~LR=hz6cTV$MYBAEefKqPHg8}gE_0Ey33y7W1kj(hXVLOHZQ09s)`SReV z3^ycjcXIKbc}@ho1bSeyw9blp{8XRe%k6V&3+C5qrQe3Rrz0qO0>whMeA#74U9%Pj z;<^(x)qnkK4F3`Z=6DkBwPX~cEF(i_CbuKQ+sk@uiXB`XPX>Vi9Yl*XN?S3mB5#Xu zCFZ6hAp+-VV{eO_vD6kL#oVVdXGb*)@ic($@8W!vF1}td+yeMhq^sZ$Ziq)X#PS@t ze|sSB{PlMbcZ;Z{G#3i)&5D1?Z>+L>RVg9t2ko=RTWL-i4VMJvAN#oDjGaCa2*6et zN-i|*TX0y~q6`@Xo8HAP6Ddm$?=#zj^9Klu~PyVZ23h0JiE$UWPJU?;U=%eKFeE%s3dGl!g3UOp@k~DAG)kuWV z$?@Ze*_jBJD|^OWB+Y_4TwOPN03q0TL)m0qG#-Lla2$rr+G33rIH0lD?BA~^Y*q>wssA^U?N5XPbXfs4ZK~j~m|$wD z;0}9l$n2|sibcea#r)==D|Cx%ww0%sU=dNCF=@+p&Fj%GEYMamtA%4zX9B+06J4JE zPT%vvq-wPV!R7ra$nF~V_(7@pbcQln5UihtP*y9oYH@P$zSxLJ@?r>CAYNhsnsGC4 z)@lZ-i)Ws0JPB|f$L8*75chKYr*bg{^|E}f4{Ea8o#>$>^gV$&SfxyCFXmwakS z0RAs!38PPQ)eH1V&i^`3^rT2#p31!7-%`vbH}07u@Ym6=HV=17uO~*e09-q>B@I_w zF21I{K|ty<0;Dc(=!zS(+$#qe+YXl=#+;hH@30b%)KZ}(wSd$2#~CsXU3vW)tyQ)1 zvT+-6&ByCA^`XH@?v7N+XsUjoy(nx{c*&55&nNs5ogVkDjQ+% zb_%z3rBQ$Y*<6Vag*+0lO`6 zC|%1KNCd0t^Ef-EE`HksTB$5Z3y0y+4N1=(8gF6elLAF_q?@1R^qMefWQ^t73~Hrn zsZQk$ZfD0PzsGz;KjzgJ`J>ad&#Yo?i>JQ7*F-}Sy|4Vp`sTDhu0*Yqsq%2)0=_>= zLd1Wrb#7;Quw%iornKBjPCkv_peP9zb|_Bg02H;Jni@49bNz!8&F>k6?H3YV?f2{L 
zcQjglRowtmphPPSoZUSZrHoLaZ3Sb?tqPbJmhoPVvz|=OJt-s)n42h^NpT?}&h~*v zJ+8N~N)@=R)X=6scn_Jxnlx%z%o^c{RRJy-AFKt^Rx!>n2`Qr9{2Jk?`pVkT$ZO zG!WDy>e14$uHx!@;2-bSj6C-yhN>}nUOKsY$pQ)}y$HnZs_KN73xM=WvEY2pCzItF z559jP^b@K}XHxuv<-_u$?Q>HI;D416D_4+%ly{wBms+7gVlK1E7c*InhPvnsnXW`C z)tz7@kSYK5k(DC)&jy_)?#F2FZHa9|2`21F$aD)8F9wnchEM9H7^twNmaY{d1@8`i{HP!bx z?TO=uy9>J%KWdOV#|QfsJM3|)&RVzpqf>_@7Ji!&kspeEIv2#KooNOif!7t*2Z zya+m(Pi)amzs;@{HU;h5X>_1{M1U3c#PYg1wt zC)GiQOcW@qG8#q$Y4D)pcN@xH!(i;^wh~R6{KdROOd^PjI;uV)hZDuJ$n;t%oX#5P zCH({F+f5vYFMbbmE-d~afz{XD^4cUjw!X57qLTfMRtIJ%&Ce#k`KY*k)fYUw_LPtC zyc_S%8e0DK#Ep41m=7nSBH5Ox{ZPmK+z9X-0~X)xb*~JOPhS7HSd6sWzC|XsP_}tl zUQnwKK;&{ib zU#!CK-1vXVH#wyIT8PsYy+zalwN zb65b)_s9hcj@^7#1Q{~TO!evq0{mmdDHJ;3)s=W-5H65CMW&BWCT%i(?+Q)G&33!8 zseXZh;?-GoXL{(tO>z-g7qUv?WBt}GUZCOD>YITOMf`(EnWSo2F5vwoy-aKX2tBaYH?xj zQzvKD;9n=8>3rif$x{)WeFnCQK6|6~yrei7xD z>|ueMo*<)*PlZw3euvF>iisTIc#FU0i6^*u8@s};w1XpyCiW8Jt-i_b#=5w79O)^F zX&by#|8(jAMQGcim6CBk3GCMLzkB<`b$!KuowajhKhd^&(~J*mioVz^nbE3dP6wN| z-{MHEy)dlGY@*Uc^P^L-JCj|h-QE^M@npWT@KgziC@%<;{ILdXF_xr|G)YMJ;7pw- z2EIYu?gJ|oFDk}Z@>izfYcl}vW-=$MR|}dxM07BLK&)^PzNQY)IqmJShFs{kN&-Fa zf9-*LNG}y1ds*JA6nmUD$DDC9xzb@}y~82t2yHz9aHfVrEAX3RH99q2b1)?BnmTL| zcY<4{Y>?T`gA@wS$fNit@>n+OzqHwa*Xi(n-u!kZS9kWU?jgg4<5y71A-v}s8gGrr zc^EJBuH9m;(ET+~W4wbbO1%KmrX`92PBRrI;gyZw zO=9DZZ65CLGz7gl@crtpS+v?MjE5bi)80ipEYEhk17#6Fzu1EBdA?LDEV?D{*3-{H zDNJAe0?6{XEop^IHD$#PHXS+!@<5U0!2e7Zy{xE9%`SAXpKCX@PV*!BcsmkA=VGc{ z4{Wbp6CqGAAGM;*ygI!7#xM}Vz`WbT`mw;>kbB^(wc+kWOU`ww!cfue!XTmg41++R z=G>|%xpuvsV!fR+NYyDCNMMy8w2GH(`O)3gs%P8?G&Nh=+6`WMqAS0NU<8vzM9>V} zuqSU-^Ew$S%}|K>M~+XJ_b)3tgvr2g?;Z;^Uk09y4kt|ke^n;*1((!8U2dp6*+$C) zRIQ!VmZprJCRldS!oBqgkuc|Rpd@mqJt%lgL64;Sb>WKwc7htV@)Ljw;JE3J0wxSa zXAt%|dV}{2WU?^f9F5%}oi5%%?Y~jQe^ttu0nQhpoK|~ye$i}Z4tG&+wo6{m`cURH z_W)9i71&qJSCE6-Vpx}d7fbe76()ZBsH{8nD~TsTx_-v_ z1hMFl_t7dolkT?|ZGM7w`699Se-R$QKP7&`t~PbYaW#MuKr9A@*zXcZ$?8-fYh%HY zcDN*uPfF%aF!tyyk=H0_cFKs-jS>?PvSNC5x)Dh*k^dVDKID8$3hX1Ic;p#oO8j!k 
zj-0#Y(4Ok2k=;LE?b=sqm#4mCn>m5@W$Ck5eXyq0dH;e7#Q~58Nc|A$AZ1drFcr$s zeIC-+?{j|Y_-ypWMn1*%4sXE|)(#ypUUBC%+ObP`vu$}tlx^T&t(ft}B%~Q{{}JKe z91kyGfz^r#!E*%zd`)~lmr9(ht`MSJ7&8t9<`ya6v&!P7QX#bOBAHWFTiDG3V>yx( zvVN&uZ6dqaZVx-Z{^(^$DH*bU&Lh63~4;n!1s-2_cuMue%J1zaY?|hpM}#Y^Z53| z#M%t?plPGVeTALxndd3rZafa1)=N*>TjB1JQkH~&J`KbG!OMJ>$3(^=da#6tquIeB zgd8ezY5bIcy%9;o;~3{yZ#KY5C*U1yLvdqcv0quP>ULCTw#7byiUPMM zSLq9+OY#T+HaTym-0w6eQ3FsW&U~(>9}M5@tp~W(WCrQfirv&dE*5@&7PVDC^ebBM zEveO8*$!rdU!TH-%@+#;yRY}vK;T-<^^J4ato>8A6-mkS3v#>$fYvoauX)P#H%zB? zu*aEPr#09l2Y^*s5eulT!)n^5fC{T|eR0s*e z85eI}1m->fu|S!rGV>?UuhR7Fi+xQIQ=k-eMuX=;>Ht_~>eX((pJpp(^8Ey%E8LA3 zkH-kdizu_b-6G3KnS@{%V67h5%xBd+OYtjO^~EB3j1R3o`l*9jrzY@69K?eWxNOuw zQw1P8_;=liW_%GHfDh;%r1K$HtB%F%4K8CJds9XFE&B=DuG-1^P-jaw4l3233-Pza zgcAly?vf=QK=K)ICBVohulDUGtT5JbZ@xYT_A52Pla0XweY9+)GBHId0=D(TorwZ& z{%Nr1Z2-9JWGS_&T=VY4eecqGEGul&UO8$jI|iQ$2?dT`y)jQo@mOgYU!1~gn)DG_ z2tb}An2-uJTSj(&9PZ8w&DNM@-ZP}hM?~j;eV3}8=62hBZEaDV=*3w9m>%A#Q#z_h ztpB)DCKx1@OPCxOxTjmgUCq%drBwm_x)C^h-gM% z9aVi#j4FTNTHKj<{z0E>i+;qLlQOxHJ_MrQSp~?7rt&=emxceiStFu)!P2|oe*qgA zt{c-DFn35YYzX4B1KB=x9(Re?TB@BETN$S1ltvh}=b(8~Mf0Ut-NDb-yQ zJe108=ydPhS+3t{%H?zv0gde|PTdF3B#LL^XWcx%G1L9}SP7tMnN1e&P8HCDEi8ia zflf8La$t&;ZqZ)#eO<1(^p$O!At-UgGwcLzA`EV>kfhOHhGCI9-45u0-G-9cl)ExgzKTB*F^f zt!Pz?-bKR}MgTK@DIg6PR~Q)`6_HhV5^e!;*!roL!1*!%(?Y%WP%0mFXUwK!XV6BL z&>eTVymmpRSm`-?COTyuiaT38m?Ss&?!S*^V+N9p@I zvTVK^Q9iSAxX4{;>06WM&1?!wnIqTq7nd{4i2KfSaxrouHSE#XfJ{8l4D#R9`QctK zRardXI5~Jh3^Q z66{X-A3_KsGK?#94*--8y#*z{F2&W4k?*b51)g%Qh&v8^ZLfNull0` zrQ%m0V)!JlVLi=AXOV{(Zd}T+?cMd+b|Y`H%$-aE%Z~Z0weals71WtwNeR_w4s3Kk zNp22qR&-t{l!%?@>%52%IP;OGz%F`LknZgu&p2*9E5}xc%9}}Eu@6YuG=!{BdA>c? 
ze4FO#wZ>FV>(MFW6f}%}{>VqgL`(sFq=lpZ*bVv7mle7%t#F)C{&*tRL63M%lc9Hx zY0`D&+4j(!LCuX7@zj%7(RCIJ;1FwNAooI=gG7oVSigE!Ov9t6tM%qP|W)BmcusTKHJibk47l?B)blW~(HL%rHOG6j?>v)=)EWQG~X33k3Ha%AQ?y zhY<%WUAh!OtPtWR{{prDTFtOP{Lm;~1Zbq)1ai^$cCv!JCGF0Q30JGVKQT^5c|N zVo~N7KjV&+i)M*pw~T0vRD^yVnvN&|0NS|x?8+-3mdy-@mN7!49p0WV$Y4@3x0Q8h zx`l_=U-xZ9fmWUOOkrF+V){apxt9N~3)cG6zd6)j9s@eEo>kQJA4&kPBZ}X`cS^BP zC05G&ZAu%_%@63Ay&sQ0*toE9_3Pfn6LkIRKiLdFk~yj3W7wqdn*gRRzx9t{4jdT% zs1>|CgQ4LIAho<%V&&}+U6W3pf>2ZC1Mpk;{_P86fC@fKRt*Pbyewwf;=I0{kd{5P z?!!nM@%doTu@dX9*55oq!CFkPDHWT9-TlGW4yb@dpMJsmx;_wXWdH068Q0rlwaqt} zf49w*=EeTlaC0PvfU9c*oGYDV6qR*R;;1N!kADKAPh5ObwqmpkqHm&`iIgD4Wye^mrHw5X>CoX;W?*b$- z5s%9i6ZK|&qaO&FCcJjzJM-V0)`!!kB6pjD?z*TooAqM~w%{0Ze#`SC$>GkKL`?)8!FHL5QTGos)UOtpa28OUVrX7& zd_8ugYzX?e@di<&NQ2$cxclauQaaX|Xw4)IB@(dZ)UMFGVVF_i=0R#Th;<#!oCWnf z-(*=7po*99Mth~_;xaZbTMw0JH!!f83^9uq8UX{cLOU61n*zOvBZ)WF9_>_MP>{CX zAJ=V&#M{1S-BXAv^QR}oZ?P#JCgQL)svcsZ_8*TCNbKf${eY}@mF)8iUZ)=E7pHO0 zVCv#ty&K59L=!+!B@fiS(=RN!1l^s5l9-y@L39Sk?VJjJDh=HYHht#+igU);}C zOi?&BH#KETt4bUKZ5Yp!#kium`zp()>eqSg$jFOUASIC}k<~=#$IZ=NMM)<=o!#K< zIm`X9q|W|V`uBab!-hW!b{Y=-la!lYoOi1+$_rL{Nh0d^?@Se}RTb1M62A;!g%^HN z%Gh!(1kjXU%3=6_9r7>H!$5#Nc9==1`(JM-fXIGL)BqqC**k?o_p(b?)9~jvw5C4T zC~NsS6&9vGlS=Q|Uzl^@5PHs)CU!Y^gn zE#}FwX})4JS)US^%~qB*NrmJdHdds&m3&cbC{k~bOJujCMcnh;I?_wi&uMx#_K$nO z0Q<_#w?9E&;Lk3?`w@td7MupX;{*hS+I4G%y6Ak1YPPajI!7*LWrYnK4EL7JyA|7j z2^$4!!<+eoVfY&SM`Vf1Pb`@z++-72+B3PF1c3Bk;lR{7SKPx+H96JtgXPap3-=}>56D{m0No#a4YB6TcnncXe zflCnjoA|CL@>!Uueao<{;E%szMd%x(B{k9kOZbFRYXoPNsmolpC`#E!W~__2ns|6%}{lia5Z&*MFu1gi_Ky|+Md+K>%;?GBR9b*0N}dK=K|gMq zXP)s3y7k8t&L>#JyF)P`=-SE92w3k{u`1~}Upc3d%WNxBR@SmeAkhK!z-iuL2<^uF z!Mn>}QlMJjXiq=vZMvw0&fYGi@~IH7N~~~(+V4!u1!2?ctDLi_vkc!meX1;aDZQi4 zP*RCC=2ChBDq0TRE4<5W{!PMDpGyT^3q$ayAT2AwuM(TNi$_V`H)#re`z z(sHflT+DWEI+hSgCJc|ye5(oYkW|~~;)nj>r5h}aeQZ)2g5VI}*FM=zVSGzjU=J*u z=B4qx<<|n$Rw<5N(i@9c3k~)$B+U8!B9pC5=q1+w|jt)f_cc zmxkqG^Lx{`YP&zn*3<|XIL`ut?16UoB+OcW$5!&s_o4az*YxSaDAjPYUCzR!{o-JY 
zw!~W6-C@Ge`ztu)b=2)%p^&^MH1T|}NkGFU{#pvQ!4`Hxy!3+AWLPSeE{SQPS>U$`KhY{#vBMME^&@=Y@(rwTcmRE4b{0_+w(1>H-=gLc4VBgSIV8Y*( z#koRH1DbqSl3QrrJ@wl)c)0i3!5fww(&Bn4e0|2BRH%}f-dzo->aT_$N|J_zmmCfk zDFS1bztWn>wOUQr>I2sr6&;h_rg~P;f;w9+D}>*cWqJlet<=Ne>)!tNg}2f~+?#Du z#iW#4KW@F?j4{~w{w0Rr_PTaiT>7@wPhRo-?%75PNhK1iKwEHXl6YNOeQuJMseD0d_Zh zK%odgcz_VXLzsJZ-5%oDmYAR2rh`I?4*o_Hg@#_YnAJL2w;!t?Bi z;EMvs2<11=uot)QSpDSDlKvX*1w4kqYSw>5lk_*=$g5U@6kuK_7jK4p5s2LjlM<0C z(!VMS8#wMwQ^xx~XEmHfJzlsaN;NI%e)IRSId1_z$>IWk`TlRgAZaY3Ql5f}=~O-$ zGA?}?G(eq$`V|_Z+dRtgGCK0_xy*khI4crASn;`=y8Cd z!TqH=dRo~3{1DK!tvF3ioBS8g@rpoy1t~6^ueBIFrMNmp;HVR-$dOfQEId_73&Oof z?i`&(N5DihMmsiJxDI8pmsX%ZaNnChCUJ-q zFc_(-i$v9%4cW4(?%OgAMHItV)-G-dE{je2&;DLZ-~o4_!E_>TR5sRysKy;%vg|@P zA}Nm0ks+Dgs+Xxe&_BAzzn=A9Uyx#gRY>mDF8e=!%zsxI_~y#*{`3Fi6PR(_rqHsP+=Y^aFfT{q0YdBrs%0WP99!K3}&h(3?bE^HM&Q zZhR17eDODJe(C;+VxI1O5~~xOQi|^%J41%#^7{8NoBLb1-V|E#H=6qPo`?#UC;piI zf37vbH%NOk4G^(gZn2utS|OtEIz@_=8el3_cYD@PLTnDko5}!7MuSe(rq9YxZ0&a& zARyUooNUmttY?;-jR+iX-RZq%7}7inr&Fs`3c`8)60o&HI81D&kCrxmw6>2{O1jhE zc5OT%ydIe4x7Jac;wWIi-T zVD!0}>P**l(@GT3F|jGZxgdP6C2k#4A>htow7Ppb`+_8noX6pw`iZ|=@jE-yB@K5- zfh--f47l$oPmE9hd@zq-Dt^&@)o!>j#d}UAA0B&Tzp^TdHUbQ%8f}28<+W<{R}0Uv zU*kn>#2g-d`CU@L@ZjR_ zn0W@mXD6L6&0+^&(0ot{Yd*+xK{oWcJ-fpeuA8M}Sd6^UoAncOOQ37BeCq{i_rJt9 ziO(EKkgBvkRfP85b)N4MFJ2xE2sj=z&X46dzinFAKd*i5)=6V=5c01FFKq-1Z-cu)=;@5W0LQuu8ubs%n)2PrXd~kud6@Rr~Zp#l<31p`Wl> zV;$3f07`wIN2|PxOW(~u?FHS`N`7v`WrX|l?gZ$;*y7TSxb{TzV*ADn`WqnGB@*wK{xUuqOB?5G}%GM?f$h?AJ zLPU$N^!c_2MaLAd=%Wp~uE;TYB-=_A zYP(|fr#n@&cHx%tycGzj$xpTyD%9al1#|>THas+UEhqgT_Awmxy;uDvp78 z!#ZA$6ldTB+id+ZFq%p_mL5U@k_=*iKv)8sP5pCH`Y>M3=jLQCTRAw|VSrv>#f^XG z^_Sq@6&t@&4Wsn{<}`}k@tUASUZ?wY;ZnLeGW85*JME5_w0gc}r~b*gDs8=!du2^2|R>C}}VOJVxN z1)4Sc5luJG^3Aqu2^``y;M|6(vz3}e0x{J$yqi=C;gw)6hUKuQC=I&K6va9xS&U8a z_t-+oLJ^ZaHBi$Bic(oO`eTC5_?d1%Dt!qMeD~XRYUtG2D65prd|S(1aq<80i~$HI z%lbH465+^nmpHT-tOW&H3Lu`jfvXi*{W1Z2)SCP zJX@cA6EjRBAEdoAQz?zQDY8^Z>hWLP&x zRfk^^YpygH1bt 
z=l%ZStp7t(Q2P5POWWq`bqpo1G4#f_0yUg$aW3u0o$4yyX>TuMUcP|*XuexfZ4FLd z=n6@rb7Zr8X`&oL2fNl;1`8B!S$J9A4XL%;t;UvwjI)gV7gk-cPR(#}m6=>mdl z(yOJrUWRMYrPS~@e)S&L8%(?0rIy{u`cvYE$RDQ7z`NQEapjJ`PdNQqt<0)kZkLV2 zBa?LT9a83_XSQQ-wOvdb|`P z6IM7pC6-Fobo;cgLWQ?xix_P-t>|E(Zil}&j-$r^$Vd&|R@OkQ^s$e?IX8U37ruJF zRbD!?g}n;VF+ZI)T-X#wB?UKLNb+RyzV_(e(Dl%#gchssAq1==Ysu;8sTj3!kn5%Q zQFQ7ZW(XZ+5Kj`zDm~Az75tOi3hg5ER;{GGM!|EF%fVie!Y(EK4oibG_Z=#wnSQb+ z^EtUzTMQLNq-%!}H19nV$X%YVdg=Y`+T*D89H!ut0PVBEix$$?Xf$NgV9V^XeaU`6 z_F^fkm({QhG8TEpgreuZw<4#Q?)EX<&6Z?YYb~{F>6t+cZR~SMn<~?V?k6jD_lqhS z#9=erz57K+uViUA0lZ?WlNnv)V_m4f>*4oq5(RGu_2`YMrDuqiOAnliQ>_)fvX?Bw zJq?L$Cr)Ci{O&6J_qVQmG9$CwgRrUge2_ikJBhV<3rKh&TKcTNyQfUB$gQ0i=3mtk z^V=W9b3j2BG~3_pq%xS&!uib|he1g&|J5+vP4MH%+y(ozURfCY3SgoMggVXZzO<(To5;)DBoc+~4LmE>27^lT6-CDHcuTrY?s9N|4O*4#LEuQbr0yw|3T z+;{EHpZ(FokRjLS)<9sTJ3V`&D%>gX7-9hYb4GW2q>CEMll{zaTb6}O^Ye5n_ovvd z5lh}S=;gGNnZ}CRrYv96mhj?@Eu4Mu*1bEpNmG9-XBXbxYzf&GEcw`|;Cb0wGs!II z)>-9i_WYtX7jpk9?yI;7so$epbUQ1)wR=BZ16*@bL*S{HD;|AR`a`B1{ZtQqr4{qe zYelDPFRIJtwLEIFc+Do>wPlTNv21{q*cpq=!1*JFTK{8x{FoU?4jKF`YWJ6FT{VlP zX6f8P&t0tN2H3Ro^?b!23`CDwUf^N#na(({@mp3oG49B8yInVq%+A@tQg zqS*=l8O6_CV*6V*HxlQyl=RrG7eM{To4SfTWVjj6i31^p5 zkEj#;|1c!6xS7zBn2+waFkZc5eXGI zJ%@b_FpRtM_U=7t`8G$(7zx?DgSzN}N3^*!jMqn9bHe8lExx9RIebvG#zyoALcbOM zaEEO}XA8k2yf90zYuHqnO~Cgm@H!AUv;NR6;7weK3{JR zGnZjA3&cI`LN#?o!RqCEYFI-=)_g z&+pJeY+*8+!fIIt)orc5I(>-586-@U)xdj`YIEWEi55`1E2~$0t+u+CK14olYaNpp zacA)13!juDLPCri*@yVJuql_GserN?v*28JS|dhz)$8V9qF~`+O7+ysL~((8VqcVZ zRJY_Nef1aqaMHuLO!y(nty)|X)5pZd439t#XwnI1TNS+PukZeRjfzZec&VR(EItI6 zQDcOUxMHa%)=OuMVuE>3rNX|=Dt!5J@Mu8Uv|6MQ9bW%v{>5!8ZlW|5n;k+U6AA%* z`gQcM=TCB+R0?wqj>%5;R<)7my^b>V|bgTD^~`x!qr1o6yojugS!4} zYZL*EvwZv?kEC6c5TqnIENUqgsFd0v;6 zMdBXMqw47@*OFpyy$=DohA1st?4}CRH1i<|K9I--6bMZU^C*)W65|9@$7Uds4>g{? 
z%nYjc6W%D@n`cf3xa}HxvILXI=uHv+mpYszr7==@Om9Pmj*cPVh!wY;ES=K5@rN{$ z)lZ8uLT-Y3-L%qD%rkJIzIdkRmO)0p_}{pH?zz;u=sqju*i{JtJJt z`;%Dx-7HZI#lg9K-&+5@mX`bbX_i!K^yZNCdB3FgIa&9LK|0F|+m5sLW|T8xjnY@E zGbr%!W#dBxI-UzL-%#31hdmu{O_pL}JPu~M?=YJRpFXCt;Mn|f*7$70-9*l|sh(}U zYVl#r(*WJ?@i3vm<=m4)+i9rhLetZ8;6!Nh;HrCp*vY^I`$8bYjq@@TSXx#-C%pOL zftj&T?}E8HavJ~V%#IWuN%xl+UUT70%gYttQIyD}2GY`%u#s&Box;OUT2mB=JUXN3 zth+zb10c#|&GC>oWsFXZKaTW6)dQ0KIO&a@;%$>k&WU&|1Ci(x7f!;;<63fMk(+`b zd;6`MviKdCtc&ziX#HT~vODp-`tvz-e?&HpUVF4E5PWOf4OW7~;T>GH*iRF@<{u6C zyq6(lOqBvS1I~7499Va>n)$W6&Q@~}nO`&hy|Whv#R!dVbEp?DVM-`C%NIy*mvJ2p zfX4z`FCGNmMz);?S-4ksFoq*JiS46OH~pN+A~Z>O^arL8G$oeHR^&zN#nSF3MkY!d zSBINPgxeX0*G4E2pUdw5L)}}4Mb&*_!-^;%9g(IrEbg<6Tj182JnD&%uc( z&(325UH7Po`s`T0S;=Ypu=JzPwKimoiVYzKnB2J4b9(_d_UUxdQ8l-Y(8_9Co@EMy zB|YXQ2e^Y;7te#?kNc?F%b_B{SjKPLw+A}8AZKz{4I7T(-b;!8w67n}hzgQYork=< zZmZ)A77RL=G@kow{T3hH#FX#S!Q#?(m*e9qSa<&LF1HF>O#c%><2{2Y<952fN7{|g ztASeZ=c9_QtKr$x5PCH7g*&r65FCTi{?m}9MfGk>?A=fMM`&S_@CbUlJHnXTaSyZC z>L-IJIt^jqhVDB&EB<~*Jpb{+z{;~#jLdPl9Hqr;^WWKnIxIv$nCY#hfTo$zK{A~ib#>)bqHcNPX zFnWXA7lhT0T?%$0)<|P;#q*nyB(JtwT)HfID-(4>2>X5kbvkH~O36>Cu5Eh;*b2bv zJOg82Xa>=bB-D{suu=}Fn%5=h2WvdLl5P(m=h=85%F>~8aU&#@;;|%SkdsU|U>FQ- z7(9jL?%wM+kM{n{;|_$_s|zSv`{I2t1;B?s-R1^loZ8GBJ679Hps)N9=S~HRiTu-r z&pPzw(m9Lc&A;f8C?xY2Yly@ey`Zh28Qb5D!L$!m6n()s+FeTF{%Ib&#G^H=0A5>7 zJyJr&0|Uw~=%hV!8t0g1=Ndd%A8Ua|By6UGFFSL+bX73*?N97@F9R^W<#D?sqa2K< z!oD#zelsMfc1S;cf}?bN!ZhU0K4s2zI94L!o0_C`uOdzm*E@g}$?W;GR}hx%WZM>w z?|6&e-DvJ<^sa8_n)1D^V#_5@LG_PZL-0v5cUz8E1M6z?gI4hJZ$1Q10pFq=S)&@{HQh~OhMczp8;f~IPoYU}`Us5GGAJKapZ@;1!rxrcr*3ewK^ zJ`taA#wQty4b!FSs^~#b*!2W@hMG&y=?`p7TJ0&e;4_`3p})qX4Kd^rx&FK|MB8^| zY)spF$SyjXxqFYc|3UQMZu)-01hSTh$OJfd-UlxCYVsj$19EODx7YV*Z*j}c54|@~ z#lSmj3t$i~kLbn>B5G-9h0Z@D4gvU~HJTXhDi_o1#yZLL58CX^#GTp6;t%#&?Fo%9 z&zsgMXC`gBi587jf&J-l%uMlc00vpqzG5-BFbvgK8_U5BnT}JwJ=LG`FaWX&w4saf zF53E%|MEwgNSifQ)~&|ECuY0HeWw>ej(uZvoXg9z1 ze&#IEzj&uD0Z;eJ(@=%F+eRa+LTml(54|NSS|ie^)GnShCiwrTOgvzW*|ct`CT#!K 
zCVJ0-G0^W9wu^C^RAu*}9Chn}tiEtAOg51|BUEldpy=rAa-vGGg!6Bg_EQ3vu6S@IoMY z{j!4QyFx09**koNlsE>Y7=?5v$8<;Efh5B?H=^-KqYwhf42O2PE}KSJh?{Y>QE0p! z&(Q2c6lp6S@_e%Sf-dEc1qF`f0v)G05ug;+XbU z7bG{vA^L_!I#7Qilo4slCcgz`FWj< znq9T$ox(g~Hxw^HQKZCWQM7o5Qe%wwG=slCm4be{+wNKR_JfOH{q=`1Dd%$|aP(5Z ztGnF0ZD--yA*J^C-Oh>*FTW0Ky_#{W4H}1E%=*(EA8UC)k6d;Kjbd>uENP?M7dsw; z>)q`DcJ2@?ypF}3HN_WxdBnl zB2^XZcA)%3vz-#PB27isGqDhX05Z&p#@h;qPx7t;^3ia(OScVBAA{XZu*tZ9LW5!V z9_Ad37c3}lso_vH(6?-d7<5fkT*#BLkrQIleDI~*3)Y~nJYO0hJR=u;KRNNXcLsuv z(O@4EVOA~ELHVx2v{$2d%N{p|dx_cFo~Y)@aiiftb`x%FROnS=H9T&}2sE>@(qT(` zwLICB@ua?EJYdk8c6F-|x_&WEappg#t7A zdKgA5lfTd%?-$zlvU|@C(Wp+~5e_9#^>)fXKC!f#Ai&{DSoN^%xdXC&9&)z0m7b$9 zck|uFsoY44{&PdHs_5G%0YFq5WD{<4yS=1$=FlQZ&^wCL3LiZh!DJVvJ7NxnP4}{a z13xn_o@H4K#(Q56&?0anPylIBys@q;NtgX>K&< z^?ee+@M2|<@wgTN=b3r&lNQZKqsq$gM+O{p^JI}Ai{=CO6w`-Z&#)X*|IWC|k~2JR zl&Zgt^Dv@M2aASyrB}L-sMchI&2OGqC8(wUapRzJ`m;Gov=3Ysb62~CHb;ahkQF_p zM|*eL+acF`0wO9?CM%PJ*NDvV(sXfDpKmFbP6;Cv!}7_((sk_ z4oW|UneChWIDBj+41=JM<4))&(_vq%k3PPr)XUWEa3ZtxEh?bAYb}6YUagaRmv9fF zsIPXEEzQb7PHJ||?TDX=5K10}(R7439rlB0t-+?tk{yBW!prU8Lnp6=#T8WdJsa!~ zoy#KR)ctndK>1Br>#W=BRE54qY@Grhy0(wcb#+cN@t#k=mq^#1GoG1{aes}zkJ8gCcBh&?8y)(%@DCpIx%dsPOtX;JsYzZ$NUnr!VO{kA+&iPWud-^)i^-`sq}}f-_XH+MImfeCG>NVC@RO-I)Hk zox;C)$#^F~1R-p8!FmIX9b!U!l~f{jN`=uLPCsGuc+Cpna8Z)tV?Eh8WQk&#|c7pw1!zUo$$l44g=Td1(@Gomq< z=;q$t>YxP=zi4jv+;m|N`D^dT9-7jMc4e={lp!i&Xl*G2WE~N_X|wg>@2@+%MA5e2 zq_CwKc&`a(L4;7otkwdaUXzyp!k32_OTEr03vV|jNSk7!M{?Phc3u~S6x7Z}afO$5 z$X#6XL>a{q;B4!YN*}UmQh&H55R*1q)}N3s;e>aIccxq+2*(n zm=|B^;GV{`^hv?fwjWek&l-6r@?GAHt}agc#+B%X8hT?9COmdT-#Vybyh{+;?{Iqr zFD{a5e7vY{@UBzJ#GQrGwQzXH9NEfxip=%^^!cP%&2e^nwM0|WaG9P$w6%@-`?A6r z)M%xgh{yVsYYA*NHd>4h$gUe7>uWNzXAtZ*wr3%+?7+8CZH|LE8a@y4X}TuceY{X9 ze}4z?y89;xc148v#ZPM(AUV}`b`O0#uO>@SqWmk+qsV>l(Sz0mrx3oho0kTl*W0lg z7h6wjfW-$Kbc}8S_BMJvdDrV09o0AOj}P7PlCGqxJG_HI_u{m08{WNSeotWq9aIrE zhBnb?CHvW72i-mA-3KHLa9$kVqKuxYmHA|j%wR?;D5GTpg81p*(2k5b8zg=+r z$6uR6OsWD-GqN~MrZ&#-cnh2Okhi%4(y#pa`+37FRhW1G{mIdTZ3=HOAf)D7Yfqmd 
z1}?FzF&A5OK1Pg58l2=s)y?j;zs+|oUdB2)Vfgz}Y3HVwQe&gVBQuFB9Q$hAF7$K5 zJZ`L!Lee8^%a?3VK1yqO&67s00L5UOMaTDkB=Y2|#bf;)u5=*UJrd|n+lk}%IL_rk zR7VM~bzhzI*#LAuZ@)%YH1AnbXW<1!E+iZ*|M+GcEwy$!3%ce(d~pg!}%I@(BchT1O1>;YGN^H$$S~w=O^SE3!ODr$t-`&f9 zP>9+$gA8H}M?Iw4JRsk^O7uxGb)E!h4se;(xt;=XL|&yJlTf4Tr(70f266P?PmKZB zq3`j6J%uiI$hW|qC4dqJGj!LV;;zz1U-#{w0ES`Y`)-Ig#;CW~>jBa&lHdo05m!r) z_wSeask9%%m}h0?LjxxycI7)2J~prMmqTWewymX)y`#Q?FcHS?r!m>xhv$alj}n<; zlj!;KJi31=Y*3AN;y)UzfRjDzJD)*%K#!M5i-U;p&dshz5*F@m*E#j5uvSJ z92rUr^Zv}_9pV2AX$BRn?bF~vOdUO}z*X3;@qtrr?ik($RRf5NmezZ}*3EC?!F#SR zzjBU~h0li*U3FyA4q{(@;1WPW_IFL4XHFlYWH@RjAMTk}=HF$+jf)nZkg$4)&-pL3 z=_d|(hKZo0EuOw!<8siS*6Zb0qi>Hd9QQFgtv?~n{S=! zOuRQxn*L^ZaQfCy)2t9?^U>1knBkyMXENCj)kJCZ8)zKBh6IM(K<6}g5I@by>Q(pJ zh~^)B%6sO)EG@}1`vFnvO3K!`Jx897W%=~}>q*y{DZ_u?`h}hfOaf93UJ;vu{*L$% zL}SvqwvQ+aGxC^me7sjUuFa#qNI*D8uhGB#&Y+wB%_u335Dm2`{_iBe#1)CfGlqnyU}Ncp8fq;T_XhdY3O)-OIxFrZBmRTm5_blmE@5~g;f{#xX&!Kkz`Tg$@F!3A!9V`_VO!Ys&-M=`XUk`0x ze&$t(IBowGb^ZOfe?9*{@&)l>2SlydWtxy1#Ot~}y56Ed_1iL#>k`uux)eMv;5y?@ zA?A}MT1~_hCO0PvMg;_#{tsOD`;^eN{-PZA3&1c1D!h*WxVS*c>`9+aLA--qY`UYU zE23il*~xxS9sac(m}q0ILZTh?ht1o7^w}BP9`?2jl=S`zj}M0u?%7XF$ep+#4nZ&(EzrKuNEa`OJ1f zF{Px@;jc!wQu(i`pS@Eh1R|duGyc2eKplXv zF#I=b1L^Vk4M(Mm7L(zo&`(l6s6C-?0Lyv5bjyX?tcSAeb&U*Iz)I6V8i&>l`~U_v zh%Zrgj?ETWa)gg%)dlY!U46t~&W;v4_-d>UWJ74s)HYXW{hrYCpRu>#qut+JN813% z97LtKNP^wR$_@g69A`E?q1Rb2hBGBWg}GW4Ydsqy=0z@y3S8KZvV;inel4zBolW;S?2!TfJj;~i92|dStF-lr+mC8 zc6~cfe70FPCNfjM%;oQFJ#i~BPac>4Es6QJH32XgDZSwWSe;FZYb@b!-sCBMgZZ8v z39vV7jW9oP{!aIkMOd}iGuZ32^?VpP3o1e|_*`|$^ZW+7|2KO5^MT+QkiLQ^n#^y% z_uX=+@mq=rXdFfN(Ho##E_#BgX9J`(Q3T^1TMU55T=9EU$FmO97 z-zO-A%voO^2?jj}((@BXi&b>Y3KI;v2*{hIkE}e$ID_c3w_@Rn+%Tr8M)$o>`=kmB z^{M|wVg3eOtic>y+9JhHMeVHiMMT-87XouuS`AQ{>QRn1Gku08a4Xm+0~`u!dB)wx zKjV!}5Vrw+_PIGA4|eT>qvzd!X6fG#f6TxdT0GbG@2ni&DUnHD9h`%f<5-%cMCm*^*4BuGGE!ADsF3 zwy=O*n5#Px{0frt#c6(C;+t;WCqq){2BYuvjn#_pJ|4p^qElr~sr8c4A= z%#D%#%o;OVpw-?3+CsRk9B%Lc=@?tE+-@f>`EO6|F)BmahRa57jr0FsmEZx(#|&YJ 
ze0=e8Y!}>(Potf3E*{91=$W=}q6ar!<#A#!0BKQU!?lhK62jnzDp#cL{;l)hKFlAz zY%hvj8htF2UuETC9VA^*EEA1<;@zd#R1f4{ebWKQ(BWwUg$zuPGamlJ#sHN)uN>$9 z?dZ#*`v@e%@#G9f0tRZ0a4`+&(CC@R{%4%ac?La_#aO-fPmVys({O8x2i0SRI^D*Y z4EqJMucZL~%HW(}wbzmlWv&Eu1?T3Z){Nbk0lHMg>pScg2R~S7PI}D$YnA*BS_QV+ z_d>wjH~zStrtb>c`>sXyL*k>Tr?j?YEA4&3 z-SytNB}7$pJ`(qxw8tN5Ed#`^J*Bje7}Y3oRDZedzfk@E<3y^Jq8q|By*A$wo?l|^ zzZOyF=C38wy*W)X60s3TBp5g-iJb{Ml&8a@zdTW~dUY!^X;yKklXtrI zgjwfm$1T^5uV$(#FV@-3vnze1ukqmJr1=T*7?PG)RTR&wLSo(g5L>=igA3uCTbr5L z`lF;#UowE@fx6@%w^A~{$OOnbO=qaK4VRXG<_IyJ564pig(%_1y%`@Ltq16*8-SVs zwMmqRKU{CUY$tdxo=+%y-oJ;Cv#dCC6i)pd&vCWiJWvGiN*kqi1xC@|xQQu#1C8MQ z4RpG}S#%%~jtKdz#Jd~81aBSQa|be>7hDxRzo+XbJ!QN;RX8G9&NlQAv$YMzD-(nG z7JXHk?6Z7nYgHh~zV!l+T9DDprGxNz2R5)F_4A9ref;l73#yxWL{*WrRlhg(F269Dwl%*dd8=dS_$^=2;PAlNPX4Zp^b zE%#F4rZP7M=AS-R&wSI&%LRJS|N7U*#DxgwednjJ9zmb~Jsm&4mVt@ZcgluFsrHAV z00Znv2fBNf&LI8A7U*7~-wXsl$asDG_wN0g>R;c!z@qc|a{cUW;J<(Rt>qhNQUv1v ze{EV^?+m}}kO7DuT5*w_>*T3~z?Mis#Ieh}6Z;0}nPTzeZpPxg-x0yCpZ=f}z9#m8Ub45UZ89FTFk zz}a?9xJ`b4=ik@;F0h;_7OZ~^-?LlY{;<%;8LAI&fYk$`{}Kpfd~9qC(1o1R3oa_* zpAuJ@`X1U7RRidy9Pu}m9ZM$yNA*ubumAs^FN~a>xY9? 
zn5xCQRTg{YyNcs5dQX>Pg{15ce`;6>y+Hj`dwh_b#>mYKRl z`QRMlv=afJDBF@2W&9;$8G--}7i2~WUv%qou-R7(mMw!nr&_iFT=42Pc#@**e+)Ar zD}JcVOc_fz*um@XIV&El4!B{HJ&M>I9UZNumsF45|LaqKtHJx-u`cWRah+>~zURh0 z4V4jSst@WLVn5pis1t)+Gr#C+y4WwI$VQAb2()S3A$;<)?*#$k#!K?~8fL7DBt1ns zNh#=c_aY--8^d*L47=35KMx2D4VKOvX%}k*fg}fAPf%@BR=Pt8KfdXtXi1Zq{H8AT zbby5NryYh88@S?&Tv$x>;QOA!T?rvq&Z~9BFSuN4m!hU?91m5038CLRMmlcGju0bL zfy(-NL5(n~V1O$8Y10%XU|4P)KKNa?=_17+A`M2Sy7bY|6yiCZh?#{6t1CAPMBvZN zuZJS3YGQQCO}?q5{dr6HsCZ+6JyBs&lSK)|O4)1IfZPU3$dauRHa&RspZWXe=i(3i zl3uWPmD88L?F_ScWZYdsmMiyrfBv`an6R`&fVaJ-R8c+b;5r$aM1tzQBQPzkP0KVn za*U6A0_PekFpEb!2RWWHo`IYrS5SvPX110~8g`s$8?TMJRGN@YJ3c2H`RN}JfT0AV z25l1-cL9ZXo!#cV@oVu#6P88O`JuS$b90fp#(~quCweo2TSf35+FR(q&HX=*U)6y< z1mSMR^3tml?y$sRgpWb({9fp2bOa96YnUqNwgxv0T^>;5M%3FAn9HZDy#xXj}W}&<|RCog(mrR zJkB47FAWTz+F4x;hyUC4rV$WE_F6&*6f3$E_9Hew&>sfc@tz0#Bip>j039ri%czEA zy1=Y^Z>saYTian~FbzrH)Ktw_hye(Jdoq%K$yR>7av|1GTs`%m$2#C*fF-i+H9(EreLLE0b95j(Hv%Z3sE|UKCk>2F(ruYgsp9)*-{VO#62gBZ z<#)?he2hr^bTdOm=$*;Ptgalqc&O1H25fep!YZajPv_gedTkX~E0Vzaj1HjPI>Sxp zc8(RjS$F>2y(cWcy7&F~NPe@Vr24dY+zgsT%=gUmQ2Vvu!?JO04ywh06t&v#aih%q zf0U&XK$d!r`j(%S0IY5@Y2N3Dp~G^aqWWhAfBwzJ&vWJiXX?sOFaqGsdbBm< zwfO~~{4rf2*N1MWZ#&J#Wq!#c`G19!2lniLt%hSm!FRqz=ARqht?EY(fjLBx4>Wvk~ zNZ+PWIYA!TU0)tM3e32N>r`5lo%=RlR|xIQh}9(}DJmV5_g-(z`kcdeF$E?~j0LPz zk0W>432saNG@)J*p@Ex)>KGA&n7`{lPfW{ueRz+fCW4B?TG7pkjzpC>?3nc zyNfSQW}LWcX;b%k+)g#{UA6mb4O^eP?KGV<2<_Ah+#lTZLtJ0Zfd%ax4$=Q)SqX*$ z1_p~CmuXGX(NVD2MVI$M@|+v(OB5?fGvoWtfVdZFzE|n+42fGyQh!;g8Hp{@TX?~@ zfA3^LTnmusRd77<3Gb1Ui0&th>X%G(ui4WF8{ELJMg`~JdlSRmCVX_<7H+SVb#TYv zN&jIb3Sz`-zDShbI{0K3w%FCx6;bJRqz#aqky{@)pRs~rgPD>H*Zs0y{HAsjh>v3+ zH|dkqsr+M9lCVb@a8QI678W|S2jRMSpC50Gat}QNI_US*uX5-9T-s;;WZLc#IgiOj zE^qrzTkDAXgtTlPzxP2E_a^jobG%rmOW{1Q+cpj;*NRV^q823#z0ow0vc zZr%5pn!=%e>+5x``8v5AxDOIT_KS*`BDaco@`N$Ezy47TF$fL68Dse6QT^WFzk7gR z6ceCXWsMuSf3yHI0suqIg}|cT?_YuYUGUd6e{PMv$+LfI`3AaR2(Y~Wk2L+6!w;~? 
z>rdu<=QsdTJ)7?DEmP??U=rs%c^JQDZv82m3pMPXh{RtCUw_B1t4rFg^)?wJjZHL- zh;Fmb01GBl(UA8 z&elt@%frW<$8;AbtwzDrt3i0wNp32!%u4UB&jx(ku9^>0cb4_=yA{!*h4({10H4TA zl6A=jp73rVII?Y<%@APHNblh0RphD`O+p(i;P|nzc{V zMfHB{&a|$Otc_she{u(n(fHj2%ZCJy@KaBgJQq*P?h@TYSG2g6?o0D-w2qar zaz$_uWO%l}^3pN0O+#ms^qto!;J!#iXKv)~^xm<@Z^%`MGba%i*;C1fiP`!7NcCJ+ zM|QvkzO(vXW@{lQvZU>C0vdMo(Xef}NO zAi$>UV4*!eqHnbPJ!ZiMyNZijLhOZkUjku&$gAc7;e&|t0Uq;KqZ=`2Cab)N}jXyH;4Qpo2 zzcr7Eq4Wy1)qtp(gQ$Vhy^j;mVa-=7agVeXnU&MFkd<6fFa z#Y3ipf6BK*t{UcNs+*V1bLn>OoRs5b=aEhZb54>188Eg?+aKlkkjLht&dm*rKOXyD zUe7v<;dogkT`xT@kVQi?eFkX*Q!iJ%2nTCb-U>1}WX<{6uZS|y? z-4``=N8lhyJKt@`Z^!LbgKh8i5X~!L+T5)zKpPO?j&S7 zQfHej{E$&(Qg_`+^R5e;XgnI%F){$EF1k|LH#%;fS&V%%cVCJWXydQUaR|VD68|wF zx$F3NS_u?nVem9n1zT_c+;pt{H1?H(Fkf6G|J3&lR9G-TXIdYx@A+^>-sL}*8@D(9xuENk(QtIW&r=+B8bIPU2 zTI2^joDOrfz{Q6Ys2y4XB%khT57StAqYkm=K%Uo~0p6rJdKmk#Bd#cbEgWBr3aBfw zwoh%Kvx!Ql{oqBq=_Hw+xv7(W&okPh&&B29eptHizFu(4niE;wvkJ0c3}K61E2KGw zZJgt-NIk3`?CnT>(vhq(6QJz9PTFTcxqXv=#8#B~OsIkT)$nYhFihm2f9++~q-Xni z9YTP7vmRFE1u=CdFkys{ zyBp3lwOu*#lA;8a>9vs?Uk?yVm`!}4;IsNHwER&rg)8odGF1^f&i$u@aHj#01^ki0 z!3&v+-pZk>Ks+(Ugvaz3ufHmN2jWHKp&a`|_%EhY8V9+xXyuM7_p1H*&&5#d**dnz zT$#(q)Asnc#jIuaDh5QUP*$Pq^Q}q(HB)5r=Xqxfz|{?}g6Q628w~C?M}5~6(mX84 z1rVPpvl@2ve!gB4yWU{De=5r;{)ZfA+NaTRbY+HT=BEfC$k{?T9yp6Ak>4z zsElQff4BguV%UB5f3WB@i8(`D;-*LU+T*b0|A=j;-}c9~tLGQem+r z#g2w(OJzgPWU!RV`p&o%20wHH{rT&*o}X6exXGTy}b(NM$4A}%}VKKFvW z7|msd7WegOx7dRr1s?B1hB346vR-w-!E?z*u>l;ud?Zbb%d(Z5lNB3gYGUO(wM%$E zodRa3I?`gReC@J|jPGS%l+|}e$sKv!RIVuHWnL=LlJULVGO)!oD#EP>Hz4cRF{+~^ zyX`A1dD*+x7&tvWQ_cJ|Ke94UPg#R)^B%`xKFEA6Vf-;&U*1}M1UTXCSu$B^I+SKO z)-haw8)UmDQ5i3L@yAkNk%Ev!&luf10=8+r2A#7@5}&=zpljW2#h{IN!A0FLkwEt# zW*+c(L+ea#mj-GBW4ZGY+uA<(=Z zx#gWNSb3BI{cgIS6(k7nzVSq5gzDFuIYTY0Q=gX@4@W45>J!l(%?^Y5W-&aKFyzVXmtocf7DVPn!)=JlNK(6J@<2X+-YcEt^D0UhDc=ag9 zu~Ro{+?UfLF5X?xPXF4teS(HZKp6T07Er zD!v}dCpR6z#XpCI>lbjo-U7AKyzSL65j8O$_s>PHSJ23kG^yIZDRsOWS`%Jh<*<7- 
zeuQI|ZY$zrk}%GEVed7}3u5O*_hoM6pZsK$oRn)yf9n2D`L?odekMy2$4i&qtIf=*+vAG@6E*eb3m5vpujFrAGu-Yl%q^T)N7t122Q^DC|P7y+ZOg>*z5&Y473oX_rtCLdLg!Ka+bdr4pD9c9Hft3pA- zvMocOT8yKfF{5IH01MXb2UC2Y*dede#;A%iR8!F0)*^HSizS8NGC=@TX8goYzB^0H zsyh0!52H161b=osc}A|UI`hqz{=J(R_djyq>NNw)UJMWOaVU=t7cWJNiPkQp-oJVZ zOQ;G|^tv?UNjDjZHiWuz6N>u}na1mp_V?AT#Ocgf=BJW|DkZrXC$B0f3T=MP&Z1Bw zl3)z75dIJd9N%0vl1Gdko#qRn!I!KjQct*5KDzHm+Iuk&IWB_`Rj{D9i6Zm^9Z!69 z3-YbyyJOhDLaUGS4_L>$X+Snl&5pjU_|=oT;OaU-0X`B7{})pUVIpox@NBxF!+fzz zOJIANx2P+dWly@9KPgt{KWl%iKc*-PC9T+!*#U=h*0b6?hlS7j%1QXwn8|DiYYv*f zpN)OXgg1BA@788o&jvf9mnI5vd6d+|y2mz`!Ov7`8I*&C@NBkRjT1GlM0W*>2n|fm zKn3Qud8d^mDpZc14881TC*3?Y*swevy09?od`*Q+%b#^SN0dfQn=UQb^}49z-_G~al%0R0)4ly<9I8{< z2ftW8NA9muXtFh2=PJWzb$GqkERM$=*F?y%ObsS%a?Y=Omf*E~7`5@tP=~e4bJLT> z!dN&9EgW=Mtjh_T19wdnC}+xlzQQj#Hj<))*$^Bywk;Jg~< z0jzjSwrmJvmMM049H(1zbDm{a?Zrz$hejdKwWAcXh9(+JySe7&vRAsXgC}04ica~F zpK8*+kIOv!ym2rs+OEKz_NH1bZ12=o6%heO9x0JINlNCGZ_^4}{@NbD3Zt!D3T-MlNVf*u5cIFdN3HO`w5OuB~V3%s9WjGBvnZB$E`ojCu8mq_Tsbc1XcRVJN-7g3I%pp593{ z@5{yb0qqsJ%n7Ib%N7#egMk-pD|`ovu*5F2HN9gSqi$+994RBcUQc@zVncW0{F$wE zF6uMgR=EDA@`H%k*!q$xMYW2rk49mz;ODua39fpY9-?KU*oudrny0RZ;~^yKUmlzJ zt5;W79%@Q}4iJq8{X?Kv{0Qo=O~~QrWiH{DGke{c2PI827a0Q*LT>z^0WKC?zmn(k zLI$%F-iTffkf7Bn?C1o7gwXVLOWQz8OF?u5#qT~yp3Jmd8qPVETwB56+5wn!QJ}y4}JHcai$v$d;^r>=jUf?XdDX#={`TN zeB($^u{kP{I%)JA?VDDBKOax7|OnZdYV#eb7r4^&=Gb77M}8Dn1T!PwN!lu ztLC|=c=tw6UJKeXT-K(qOi!hE(UN)E+~@CQ-_WkS;;24&ydRVzH5-M@Lk7)?`-+-nf?=1Ar^j!*O^$e{8RabY&qMNE zY^Dt4&kZ)ZA#r(fhOGSS!3_xI!wKW)X9%({B-R_`rNm4AcS4GdZxHO6=thgYjS+V5#T5`ue zeQE=NklJ(ve*RO0h&{|S^R4|wMJwvK`_tAGx6wFaEg0C;owE3Uf72D42K&oA8XLe)6AJuPt!_Qg?0Ymu4Qt$U& z)nAQ@=z6!QP+}Y*hBixKG~*8ztv#|ogwj?%Qn)Yll{ZRC}A$K`+(_I64-X*C9%lP3FAexgQTG?HL}N~Lg!|%%Gm8U!9KQP_KQkB|ZORYp*Lz1W$ ziz#MKwmXrC&o>Or(l^~>E{&v_F{T1IkM1?X-k04yF?&Ex zVeB2p8BbaNvF$Lq*!9JMI;doF0$W77ux3nM=R;fkev(uUwAWTE9eRVu0BSMRb$z!! 
zh9thsxI3uAp|Z8P&EEHX`Y~r``kTXer4XK*Kg?5jk#7f5t8$%+2rxW;MfodtR{Iu<_roEoe-*ST-&`u zkWQI@a1S*ae#8b%0d50U5RRY6>W_m`hXC<-=2G{`u=#I+O8CGrW_8k$s5|xXD)k3d zL-XA6o9~-vhQw!#3xxn7&+UrdG<%<(#cn@Le!+w<_wZ?NE8OL^OImR|uWgm=qYyjcytJ^wl{94%MiG!3r1@7%KqkZE#_SCuE5fsGzr!l(>5*Z1!^{Cg)m&emG zw%X6dUMi6soSc^JSJqB&Z&-dNLSxL3B`^1Ox$sfV(|VM|oh{PEyY<7Y#e(6y>3FKw z4S%FFL~+rg_wHDJN0^s?33cHK;7M)Z@>;+yF0j1ls!}t;3ozTVJWGhHv#iVZg)kMJ zE&yKl(DJEL-K=oh{mGn}lYuXsQoi-1RS&&{*PE;>HWtsnbb5!2xb3+zPOK*lE5KTA zlj#z0`i@;0oT2M@=AY!IHIm~qhjk+A_NR1Sw#Ap6dx3j(ZR5&ku8Rgm-^l@GcGk#t zYJM$aXtbX&03%FldDc2Y3~pMhDxE(xu5s+%7Ot*RG2GZuqQipVP2carI5*dFU$~?Q zRhIHvxQtu*n0&`S_BK>@nqG7GbY6wFUXB*}u10jvN0YHk0Tl3Xn-WvavuA09Sj=U_ zj0?{;DVOQz;>|R0c<)pmX(K<`BR;=|lC3`ykbEkjS;9algl+-TAWiz`QXbf!=5y zz!WIlUsMu+lk|Ifu4e*lsYeW>FK*8*^b74o9RDbXmS65yIs<#7rmF`hLN!M{a;Eemj~y-DcH+ zH0_B!&n-Piq~^Z92p*o!XwH>Xpf#h4TW8v%%wjqsvfNxh_3b`WyZV9fKE4G zeYfKQ_BAGY>9^Lv8268ea!_&y_m`E5XI_nw?z3c&E5~z%mpnxmyC{|lxJzp+6Gwzj zp%iPZZM2dO&30D4DBJcnbal#dX(3a|SJYyb+~95EiwdtcVSA4pGmkR~&=>f{ixFZw zZ%|ck?}C2aXT?JOQ=%f#))DGEhxq(uTcmw=F*#MCH8eKD)RqcnO2y>Gr!P8->#KYB=SG@K)61l+0a>p6DbTfm{?_;B!{DAjf9q1DM|AsDYa zb59m?C}=}js z@nKpYf63b|((^i%o~1kvOjUM2L_p*M1}>ft3x)($&CY(d&P>^w*fJ+*Ude6G8{E)7 z;7M0GnTPbWT9me~ekAlLu6Nh)8WG^n!mRxiHE`0^s z;@fxA4!=?uwx00b{3S<%e($+hI~q=UiAJCbR2ubpOi{mZtJnt+k;lA~{(@%F-TM@Y zbI165^!cky+8bX;D}AA5-859toZU+ZY788dY+*e<)DcSD9v?_}9b9!xA_!c)FRWf` zpNq(m4B-&MP1fBxWO1HuO7LRl0oxBzZFVYez{22jHy={vHdkW>aOqCEsTa^qKf3l z=|iV`OFU`%`IFKZ_Z{Ak?5jfk>j7tw4s}4si%{ZDbn58iP0yc!oK4CDAW%9{@Gw<_ zIw0-c!-ZYoy4!0Q+m)*(=|L$!lpd+^R_>zFn*U>oRHTpw>*|c9??dRfuWi0M0%v|N)>8BAoQ{=cC9LN0k9(=m z;S1=UBsZ#{u%w4PJ#>tD_}th=0sdCZ?cz!fz-AU>9p3d}xL(3&sB6ET6B#l?4BSzc z9c8CEe9^(~Q(IVwg=CQ@^f{?2o;jSaISUk5O(K3I(+GHZij&~h!By}L4Ex!h5&#MS zZm0)s!7YsviGbU&1UvblTGxLS;C@0uYX{%^j zqay@0vNxibJP+b_VtAhXbinAviko}Km^XL~Mr%S12lv^q^ii4CR9{(AbAS5wNF*7u4Y%vEISwaB0%4k=Ct_GH_8ev$yG{G(hy;9h*rA-m(U!o)FEqnSMr?M>ZCHAZ zs9n{}n-W;y4qn^kxwule>$KDu-_k%o2yo7>*J}Z15JN@T(3QoajYONp!{^+WXU5k+ 
zv}2opu5qI~oO`5}aWTcA4Qt5byLnM(N<((EsNl8%EXrT(PUx4pOMv7+Y2{$U7w{&! zPU%2j&|FVV+EhSO^zUBnt&Nm;Ig5l^BIu({}N!;-qm_9NWrKX_u+HGCHc_}1H zm2PI~b3Q!>xZ@S5l!55=d0i|&DS)#TC-ys%IdsBL@%VT?6`yQo)?FvNeAZ)U45x0M zm@d9&vyBW!87kXiyR_OPp8I!vYff#HoB#*1i?RC7`22tDy=gqu?H50OCzT~Fge0ku z#F%6&+1dyp`)=&AXAfggO4+l_*a<`S-B?H28C$loPhsr)G7N^_C*60;{k?yW{ty07 zeh+xPygp_==Q`(HXS?3#ToWT&O*YI>_znkXR_rA*fT1{0X_08iXm>n{aB$)4qRP}R z2ePjpl)j928Fq1!DrnrqgL4;oiAx>ttI@gmOR1&AyHcWF=Zib;XbbW}&*^PbpzDI8 zp^)f;nP%RVxZTq@#^mDJY02~+zF=`D!mMBbc6a}Uz+G7nCa#OX{YxL7r7fGc!@7Am z+M~Xl?p5}MHl16RE4nA3BU6+kz>ld{Kq~wq+v_8V&nJjt%HY|1@rWLk6 zRs;*I%Pk+}v%=6+o2kxKb9`3fCzIn4m>cT4(^panToBJ$ppDfsr_x`YS6eo2|Jpn4 zJ=XM^V)ihuA+Ot|7hT=A$eb{&`+}c~t2QKF6nrPoCzGZ#=Rrs$9Dxbr7Jj znT@vVOlN`qjs<#3L%twBFmBxxF%@ps*{?1Q2O3k@EP5CZaU&7Fk{=+sYn>>CEBCc! zPNlJ2D6p7Dj@W%d&~vThdwoM4)Y%f31B3w_qbM81)PfIbKdjWofas;yy* zeYra(jT%C05kE>mkIpm`9!2LoIbn|X-|SYI{-b%|qZHX|3Wx}-QbWP$eGX4cO_4PP zB>qtX>=4JEWI$Gq)}6qwRykjn+fRxuTcOHRQ~6SO zyT;``Cl2p>+nK&a_2+-kK!0Snwxlfbm3gQluf+P7OAvyDp*|h^??8J1LBGKYoCS#t ziP`6Rm>)HSF~nBK8TsZph%g8K=-HvAoLql2x4S)CfBa|)bPusC?6tq=GWY_w{7T?Z zcz;94MEt!k-9UQiRw|a>);X|el~_*i8ES6g%(3NKq<|n(WpsY13N6_nn!PBJ!#QCx z(WI#2Mo%_)^!%BB>57wGmWS@KN=EgCny}y=Xn1Rw_23@xp%S$l%(WUBkGoI((3@jN z4ygMSt)920500Ax-IVs!jJ!Mn#ztp;Q<^i^;h$a}qaGP=2yj$K?S4z$HO;yR=J&YM zLttU*vWN1C+(ayL#J*&mi ziB!Ky(#PRik_Q&JV`vikEPN?okzOJp)x(S!C3dN49wC>&1i?F8JAdME!6s+A7%iZ0 z;EA2jgH87;R*Q8>E(_FmTg=ezJ3;%XY&2RN>byKQ+SlLz2AsvpOs4hYy!Z!b&xzv5 zXww|b89+;Yvpn`rU%Mn}x3^fM`%~DGRk*c`^F-;3>KEcmxCA%fJVL76@F5dJBfZgo zd4A@~D$(*FLeVY9~o}2LmTd+43?Xb`*t= zRUb9FU$D8hNd1i)qWpI`wh~ylUbnm~I$n$PVgZY~RYY_{((m!r+rWh-Fj)%9BG#c2 zzX_d1>QfE4NTEHA!;32N8Z3Iov?*m*p>ivteGO)fhAkj=H`|+9L3smPaxuit0oYX%9`# z58f7^d0gRT$z@aCxn?6X`{FL}QerYAeDI(%wL#^B# zQrnpk{0+Ac_v7VP-e{=vgK+Ms#D+B#*fwi<8U&I!c*7N&N7Tx$fL-rh!ph4IpSO7; zBMw|I{kIj9;49_!YS@{UU;Ws3ikZ&BD!)@2E_rD+PF9I2)pxS}Rw9#HVTWpU;u z;D)S>B2VFfdllN@4Aa0ZT`4Z-pzkq?ey{c2l%t8o($*7JA*B{{KP~a3+@~4{2Bi>_ 
zINF4aMAYU+tIe5z@;-ummfLRgc%_hUkQC3#9S_%-R<|vnX&ZmKi-U;wu{k-%MMO^*FtE=Q)>tv>iMO$n<1+ru~a=>T}krm*Qj?pj$*BC3EW z)cupcNwkzR=*E{zA!{@iTMsM<&>{H>i5?#A*B!KgRJ+eRSvO)rCT5GPaBqm)LP&a5 z2C&@?83y3uZKL&)(mm*%w6o~e)$E(s4j0laD+#HP5mvAuyfB_(48czi@43qe%fj3; zdW3?NQF0!?-djX5voCdJRy?<;F%`xk&gE3RZJLa*U!8#V*h}EUtwqFZcoI+|W?RNc z(%0Q7<1J4SwxE?hrry~f5~I4B1jbqmKbFCpakKX)J^AY2wC_D(3g7ZvlONs=`|cu$ z#)lZhLmo#;N^aezs_<)N7+TlA4m-GZR+hB6jFT7+l~qsMpK6N^zaquNDA55AMri53 zQWXIus+u`3{FP%XTDk@`%8CuK^Vk6(pqzR`huwufRg^OUeAxC7oMwzy*r+wJey+~e zx7*0;B5Y{SQ%9#qXsWdIf#*Ncz5LXc+`qb$t5knzbyzH}eb3TvRqiF?o&JUJU``ML zQ;3`}EnkCq!G`8uAC=vnK%^t-GD~g|optEp1KulYMo9uc!j;7bl91pEE*WZAITA#e zj{@l|?CK2JzU2|}kggZ%51t7S|78-SX`7uB!?Sda+nYFX#$lI{UHao1;>n0$>Cb>j zUySXUD&8n^XrMwXh9gVbq1huLMUgFFjdGm2NBY^{0;$TgU5u7Od{0ZCL%)a=T2b61{#tWzt*ay-TD81}SuS4O^2$1cB1yYO22r(Nd zM-TGA^N>Xi{b@#YMLYzIh)52f&f6lsyL`W#AUT-}7~ZK_MBFijX)p5XR$M(-eIW%T zP%Ln>T$)lXQ^NZBuuxUvEl-#0?inCz_^Bqw^UUfqCEXw$bnT`tvVT+ik%OkivHbkc zQ$jh5fNmnr4ci0Bl~7GpN)G_Nmyg^3Vbk>}eT7ed33?)@fgbHkS3pGoP z?rVF_qoRazf`=~uZGHfxZpXDQ_(7mtC8uoBaKY3KY<6yWfX3~10@c!EgJ{UXInM$cr}AvuR=xZh`~iG+n?3Loyc4){>Xaz&i$@1ugK?Ud0>M#n z3_LzOPZnrZHjO=r1&Km6skagGc&dJVmE2jou$)i_97@%~H&6L}D$5T6-Xz<(AB20@ zU;`7^(8g0;GtgQX-gD9}hwgpdCA$1(DYMS3EZ41xO&QIZSXCOj8Ru8=J$=tkQGTr< zDi@cN8~pBXj<*+Jl^T6h<(Y*7=*uB+%F;u30U9Vun_vFYph@5k&{J} z$%Ru+&xkJrX}DE+O5_zEvuawCu@6%W17+As4+A}JF}F!Sat~nlA#&=kQNz%o#hw0y>krT{cphGkYPDjr&I$SHOj>1_PA8~a;E<8+K{zgnZ9NynNw$Y%vv zvHEph=h{m_Vm(x>sqqy&Y4?ApN^+MjoxwF;8t3yFBy=?S_#6@SrM4X*3m#}9W^M4UJJe8Ab7@y$Z~ zA_mKt3S0soT0Ym@WcPpwz0c>oX2Hn%(++p_QD#%9lMjgLUZ7SSref!oNWTzG2KNQM zF59Mmr~m#VeMQNhMJdK$qTqH*S|R=CF%$oGFqc9hvSfOgR9y0HcziY$8YAx_vAwuU z2!5*YA{bAdI($3be&}W(b0Ha zk!;5(GSuWlXw|Y6qzirBbn#kbh6E0~6N2TdY>;uz4*Zp4s2=s{sKQaErbymyO*F#j znW)6~-;Z+aF%xRvyKpN>ab|;P=9^q8h$8 z8Qr*|TCyT&I;Xnz^}%WLI6tivO7gJL@wpMC(tK$v!!fS`#xvK++1#Od+ zv^6Zm?JGx5YI?{Jtxe}7V0CcrL{(1M#Z{qk{BNp4#F)}uib$c@1_WN4b>6NfoiYJqZ+Ht z6~53WzQ}ihdW5xu!3Zb$@{_44jrJrO;qp6#Y&C2?dw%!MWy5%wXC 
z$N|uU_tZfXQ$;LFi-p<~>_!SUe(RVr;S60CdxD0|fm(#y6DD=CLG$lr>u+q(Tjh7z z5ArcGXVs)upg{>8oQaccI?DNc?2*n9Ihgis+JRxa1JvF9ZDU#{$aW6q`XJyGrS3-#nTi^V`NA518=f}Jbj$$ia?Szf$DNrOA z2*vQ@`S~723F>*Ec3Sx%V9vUr!OV6$-_Szs{`=*Y^p~riLW36NQ~946yGe4z2m5^>Dm_c#WsaT_*q{^7>C@5 zZ&TuPR?m$Bt7n?u+Z5=8Vd3^b>!OAYKH&Rdf}}WLJE9l8=yHMtYS=x*g^1TRad2=w zgHUMqn@D6se;#Bh^Kkf20{U7JSFy+UZuno53YjDx&axHS+Tt#9%l5d3@kq6u>aCx@!XTfHiP z*4~Of%D9VfHMYiFy~fx?=NBADiC*QGzZ`U$?|)`-tN)hMI_9}0wv}V&g{vm^>*Tbu z$C0M6HP5A?i&;#jF&_v;z=&hlF=xd$qql(8ohO=|jKl1?Bx1JX=xatjBU`3b!m!Q! z{tm+N0Urk36-yMZA$ctoV)8cH4R;FVz%CjP#~csq+6lw`ySRNbpbZ)rK{*dwNl_WC zeNC;?hqWiIo<2aRowHV>{N8%)Lk4UCq1ncI^b1m<@_>M-RnO46UmQ?p>O^srOe6ka z+NY&Rr;8R_;%n~^9D{+cAyer&q8b;Mxe;F4?Gi1vrC+@t2;97-+qQK!ZCZpM|fuQr_qdPe0USOlIY#hw)%{ZiZ7*fuS)xA-zZ z`4PB}*rMpQ#4gU_(WcbzyL(&ne))Q?hZOO}{fWDaoaui2p9sUQ%lVpi-6VoUvqhW5 zx7Ms&m!P1arPDPBdXo-w77~s_sS+U3vu(@>%MQ=NXW!&ClGq`t({l6Te&2)9Mby~LQrY4) z*MScwVh2fkfsk@(*7!5{Wv{PZO4{#=_V=W>wtf9Z1T`Sv8f~hSR9%+Jr=@4H`W|8r zHbjW2bR$Q%a1;CM%Q~YPr$!{!cFpsz07$`}y1jaYe#e<=mtki{S3Laz#lzg#Q9fx_ zroJ!PMVyK5*IE@R%p5G>05<1!&3+%#Qdj`^IppqMe%C^P&s4f5|KKUHg}2ScM6=im zFFj9W<<^o{WsjGOwn>ecfh*w-sELt2^n0l>^n}lSv}6%6Ewbejm|q9&!tw^v)V*5A z@1ZmCqVhMVaEGpWdaS82aWd-`E`dAo8@Eyh5kCxKIhVN^NKj)~uG@2LPzUQI_P-%& zvJ`O(>4+-a2pHZI0ZIts?eO;8TM`53p*rSim2H9Gd*W9$-9HX0QE3Zp$3na=fi^w9 zdYzWyuVUX`AmOV=qP@g9H-Zkz2^I5fJqW_cXuKmKS_uX;l{s;%3smUXS!vg0iPgCb z%?20}jY9-MXH}(YLqQf7#VjM31ua3YUmkg#CVahcVW|$uq$?;W_^qw@ z24Ut;x*hd8EeV+>A&x%*4D1old_eZydZK|^M&>~zhSOxd1U#8q+TGfgizd0FM;q}7 z41CfU&CoO-C8iUnSUEVS@@slf>uc{(YK~DHdt}ShZW$}%!sbd5IdYT`X4`z&_1o3ASCOI9^1?U(-dVluJDZhj8NG;%HXN;VRW!-P z2ki4?JUD%40Hm3Jxy}`UvcK~ML!NX`tuP3@f$YzrmO@|Ej0>F@dEPv)?P2=VRbAkBrFk4 zzaH6rtr!+~Ly+^5`}WzbmAVj-cgkS5Wc3k6H%epiS3t*PM&$6dSnKXqGv~xpg z=OKC4hC6{w6p+EclmL~QWKRP3uSjvEn}zj6M0C=J?X;@- zIs6Q>pDvL*jM5ReB<&&G>x;nr&;Y;4CQp7XnU7TQ57tM$#2*TKy~m82ms;DLV_JW# ziIWdP1vX?#;eD=f#ha*%-d|i5Y31Ht6wr#OSe)XF zO^h65bSM=mhi2>Aq?JtUYT_8u`wAQ+I{Jf`r7r;5_oG&~C(O2YSgvwi>+8<7+%+cjz_gyS%{6bea4w>ZgSIyd2gBxKyf_;56t?G> 
z7QB$vCDwVFa=hie)r1}9FXeaVn&C7a{*s2X=>BH*wSVvAzZp zyb#gwDAZuuL_DFVVt&a3=%QGhFTuff%YY`=t{l`-#gazNzTQmAKF>2qZb`pxoo6r4 zw%BTYIMeB)p^0M~tmJa%CO&u_>@yX0!=SCFN7X+ow^(KYw{+U>VYFEC{}C;Cwc9yaw?m<*f8_-^&UNX&eFm6gz2G zTI~PEU5>MlD>eLaM~N_4^mp#!P{g8i!Mnd!Iva5%-9=2^ehS zS4=WG#TN$nnl46P*(i3PI4M#vY_t3N6_?XsdCXY47TA#gyaT#8oOnVX^qonYC z_M)T?<+8>GEkMX~K`{`A`TS4?Vz zPVnjY*n^K9V-n?pP%TomVr9c@iJ}1tdqkpUbd>bW>I=8E6v-oxuEX<_=Peu9+>5<} z`lcm>;AuYVCPf_<;9x0WLL4oY(+1ruI|HfFH&3mc3M8 zECJ&zsNwfv=PQ=W)|Zx|QbL^E!|?U*z|u`gweLT-l^8zoo9f{a|87F5t{Ol-5o(?v zQ#%z$onyBTlV~8=*szya{C0wrq)jpD*mBGK^jZ~UUvAJr%Dtep-tG9US^uz1N7E7GJG>?)b5?g|qs{llTMwrMbzK5BzqPP%L-Dd#G~-Y(M0m{ggH6MH_^(lYefN{1 zJg(IQdqE&az%#E&Y>F#dV$r0H8-a*ymlE=bmpaUifr=LD%o%Zoyu!ylD zVA->sWKgqe7`{zwMI*Da^npn7_t~ zWlxM&rr~X}{c~~$vxD*NoJ;0-nSFKmImWrfUMcgzies|o#iSSjX?1B)&-9)JaSkb# ze$I-`X)qFN^^8%wHO(_W?YM`!%*7tMQzuSc{Ay`gXyrfj*TH&c`s<>G;)^#EkzV5> z#*J-BW7D}B%leSnA>kbyA~4YRk@o^~88JdOfl z9vz}}d7sOdK;CyYF&6N1KyU?EyXZOi@uorHfl06C&?^eNVTS|1oEVOHmPU#cOUEKF zu`M3fufNARJ`S8uXT;QEykb1(MvysY*ojNTiP-x!2aRU#Ei=0ZYY@fqY5o0TwOF&V zE zO&66!Dn=8xSBoVzx|ZE$Pj`z%sMpAHmlpT6r1_-=VdqMv18J4WGD)vE>F3lQccFE( zwWF8UWJ_`ateZpo>NLbXXN|&>;2|H=2$wva?&qgtb8KsUIY+#KF>s5h(YPycv5gfr zpndb;OG>|T#Cc#?jT(wD6wN%(DPr^;pVM`Zd_k_i#=^o1+izACuS_@#36Y z=_q7_Ix`S&Ep8F+?$ZT7*@fGXUbBoOcq}m3N*y$_g-FGYVeZM@1V##BZ;4SI4hp^; z@9pWfD|~BqtRYg9blGd-wv~ zTP2MpO{aJPe8%eY2vrWd8AJ6RJl6;)Ej-gufTWXbBb1zuh;#A0=_J{qpWE4*X=qv2Ba( zdt#LoN4X7|+7D8c4jNppN!dyLtxT^)KIC45$KB;9iB!Kd#PN*lMY4K~SJI6V3OV(> z43{}bO%8kd#}TVeYX(&2G*MeC#x6#0voKPtaPqXBL|?8UouU1#*O%-n4#mh-Z)|-ve)@GG_=0DwzJvWB&4~B9DSD z*Ex(s$Lt$aFN$>tAd#jJov*VF0>C-Xq+td=iT{NNE48el2z$r#~LbSp~L;`OHb`6iI zqnWd^eA&J+5QnDgq|gJ3Tr4u54r=Y3i+bN5{Q&++RaXPW_LiZ@26U+*TOlkjlFL?Y zDd0&01w*q$4tdgxQD>=*_A}_h#I(ukPyKo+UUGt7JfME{i4}i;-u2;?p2k*`nw-dP z%84xe^*h1I6f-h3Lr^bZHjn80g6l6;U;agYBi`Hgukk!tiEE9gf2P1UUVD47!mZlM zp*f-jER>2|Kp6&mC%HtGD;)#rla`=Px|?J0${6B-H>U##XCy=v3^axv9U( zD@sF#ri;?27VxJ=9qXb54;IcsE=|8VJrk*P1zVOani7U+QCljPYRNu%HLq1Mzqeqm 
z^NREjWQ?g~QYf?EwiEHzdGX)gK>y%V&2p;?#1!TqzL%|gVhORlpTay`Z?vUiE1M(X z^o-SYgV(suq2{2NYOQ4ok;9`CYhl{=OnN&LxDAx=$H|-)-Sw&C$n4W^KmW3Hq7>WX z%9obh$_G5va!VbF86(hM`&`1>1>H7a2K)6e9E?|5zKW3m@@7o!qyHhLbo>i4KHuivwFM&vznVjLx9R<6HC zy(GjCzWQS|5_gvi(YyU)%#{^9$q0$k7tHX^rY0z)WIv=Wj$g;RI*vvo^k}Wdv3Q<0R6lj-A8^7975S6= z77l$=V?!I0^@74OyAPyXItm?kQ|eXmLZWu`+&LA+&43(pPYpQ$)&nvRoA{+3|F~LJ zrX!FlF4e$W=NgK?VGCULZrkvb&k=1*?8^T@0)GYG}bHe2r!|ONa(nzgWMdwU(hYp@#25=4@jFB$j}A?`5K-# z*Rwhzi}sKOIsMiB7THw$+YgR#HZEQ+ztjB!9T(P}$48x1A%-Ff0o@d!H+g1_m3(p6&iaIDgaG zCwUohIWq4iJS*PPK#i*jI9BH}_qYFvb@;VCx-!Vm6pPS%Da98VYbytWl z*3fC#Tq^O;%D9))d_VVNGA|nxq*YPd9#?%cZV-w2=&_}ZtwwkBVF8#=#RcBnWM0-A zKaKcL(k?%xI3nMwb?L1>4Gnj#+A|D1=Q;IDN)c)XkmJ#voC_U)ai@wwi|A%(Yq3>C zxl!PD+FC7r!4kn-NaCdrUn5y)kiT>G9}y%KlM7^V#|6?oY1V3YK24r4C2T%sge^9;;oIv5i&31o}Xn62_WF2XD(;g^P)zMtYw@lLGaRkblj_XrkO2S`R_F07Nw9z>Hy-4RoCJ#LocOP1oYTIT9ShbR9*eFT@!_v2^{h}SSkuaw8wm*2eI!QFFh z)prNo^0*aYXEas#E(!Kt-Y5_Q?u;8nQF>&lCXERIR5)|{vjS)`xPhhV~hWn zdo@_T^cuXqme=G<}GMB+4gTu!TZ2a02s~Ya8{REz^qMC+t7^9HaSqN91AN9{9xJeG32h z?7wAl{g}_4&r*cf$Fq8>(H3?9p!hyzDIrh$o0Zne?WeDo+6b1O{RRK}(06jWa>=LK zK=-#TbB(p%TpE&o#_W?xk5Khwdim=%bWH^9*1`r70hd{f|%%EsE^5u3{Z+%8(=- zl&wNu$0^gBz8zAM2S4Bbi+fJAJ~1(+?SeNL^-^@55{wj$V|~E~UqLqZ1xIhFi3Pp- zM`M55qwilL@=8)qFP0rq7^TW?o3_s05dXQPOFTe?`r3`bYgR5R%KWB9Yxhs6$a?kH z!NacE{O4Zv*{K-1Kf(ryZ8WZsQtgtV?1AzS{N9_aV=!0ZQ7iZWt!po4|1#NX;KdJ% z#OIweNM0SHSMis!IQqXf1vv7!mo&o!X%~@7wVM2*;ZgB;;)~`uxTe#ADNEv-+%yQP zbu3^pDa@!S+RnUQ@Qbo^L|(^N#l-(%iBIm6B=ih2SYAXbAool8Ipk_Rj z53uZZ_h(`edcm81*r91e50xif{)EyRmy+uIA984YFRACL6ej0%9iMC!^$9s}Uwfz( z!)4o}mH!HRWbht0Zx%8b|1kDHb&7+cm%h9S@tH5aVJ)6MmN3zDL9RRy%!o*S4Q5Gd z-zj{BItuoCh2krod0+nYzpv+zeULPO3{EqS%2kT_atYeW^vz z$e&=sji4`{s?tp(zrbnqA8V*M<&!LovXfHft#~7lu$>jwVFcujZw*Y15Dn@Y_HMug zV{w{mk?P%AhS~qa%I~*R_!yZ(%gtEWLN!{iZYLvXnyp)REikc=JM~Pp8dr{X`$W$m zHLt+(Ul(gw;vAHu$V$C`+FWnHO(GcwE=sE<7rn<57v***Sikk+gEL0{|AemOg`$MxlxMW{| zqwjQNxiv?$PS7r+bR)iOmo@)HDgEYNNaX(X=)foCGZ(Ho5DS8o?8|5G?GHro%5Ez^ 
zVu@-e6lxM2C_!K4xj~5ce5d|{otg2HtF;<#@;BFxf#rtFt-k2DDZ}gmQJi{oB8uHT zvd#ya2DyoYsZZg(==; zfhj3G9XFTCPTmst|9?>* zNnc~BTFE~*0Qs5Q@;=GooF)%o!PeKC-?KFZAoDlMBH)qnX56aMUl-24{iv^(%w{QD z9aF*%DorR3$>4U*?4!`}wsbqj!C6OGD&zZ8&3*FEHA^Vn2q@aZ>{MBE-EENoa`G#V zDy0;vdFt{a4IHgs>)+7ouaU&=H*sN#JWbXRd+6PR9Yme2p$JkxpNQNHU$%ux$qPy_8d)PJ=BZDKr54-0Ja5{b+hm1LKv(vKmY)thKC?{~E8PV-sLM z@w0!JL2@X=ZB5-tN4*LcWP zH_n`@5hH`ZNa0EU(t;yi)DiKMp27P(C~D4FF6nB;w=0WOw-Ni~||Ko~8GE27j>UZxatp4uleuAF}+%GoMhp%PaH_ zA%REO{@3tRY+sf9iecoK`%?jaxCh_3zkKpSJxorX|5Mh7@BG9FSbp?%r4#@0?D8Z) z&g&62<4^zcGlzeo0Z+8{=$*)aJ-fmw`IUR}64Ozs-+kBLt6F*F{RshnepJm*!%XOFp$BhfVNPPg3qDmU7*i+Ur-- znEG@70tBUBdCnsNammpIX~m)*tM5p)XdZ;u3q6K6#^05Z^&@mojyQ5;mHIeRDlMr5ob0}fsqj11BYd)^KF)Vf zxB8cO1kn{J336+ErTF)f%`ZQHKI}qV$#az(Lnn^ridT82&vfpDOr3A?8*@cDRnEzx zL>4Zeq3{XSoO`jB+N@H9ZJn^o$v(HS_U4vi`?CrH@ljZ$NAIvj@Rffnly~AWA&@S+ z-M*nUG6{jQMMI7J+dMXw9g=%JjRH^=7ax2@QTWye=0(e)T2D|5myepGf`jm32^rm~ zu)uTYSP(S3r!bY7!3jhprk}-cGKPzKa>9=n{;@fmBB3Jdt6_)-*zp%BCd^i;3`JfBXo|pekzu-ZQ3cZx{N*fjBTq!(*iVs zY3B;V){L;DLKbG!5&yI0I*$9id@#7S*3ea9>+ElbbXosG4}3SHm6Gc3b&+ zk;Y;Ki8mO~#A3+DPvyXuEi-D#HHYJ{QtuT9d@nVs2EQt*3}!WAV*R|ozgle&>kc8_UW zt`2bMKm@zR2r@umZbWHL&_+2!{6rl0*O^Zmv+r(2*Eskgu{zZS_D{8qt#HFqpvW%R zLXwb__)SlJu;)ljG@H$ZMxDN`EQ20BJ04;91ML%fLgL!RT0;P zjUq$dkZEr&qn;e$;*HRZ8{g1YM8i6mvBlKav3C4HBLN;GSe33EOK3Rmwm8A63yuU0 zpVnSH{Z#G_D|1g)D>7y*7aQb>cB=ehmd7h^vKEz~3{ET6VO{u_o%Tg}pK`BWpJry4 zFs?bfQ86Xa!y)n^0VJFgK7hD*_uBiC>H{4hXUux!m3hPM#HnV^Ez!$N^CJh<*8K-d zIj*Viw6D4~tSicy#EUe3>kgwOE-glz(OdAp2o;ze8XOW9R>))@mrL*%)3zOy0yS*# zWR1D&pkah#opoz<*{6eybKaZ``*wO1tmHheo>_Nfn({?R^KFy3d6!ELa%Ns`b~t@k z5#3|W`UwKPI|7-EHls=kh6RD14zS33;G*#~n-i14vvQA4;4*In^jh8Ba${_M%U8K8D*Eg(5{DW=HQ;sG6@tZ>=zbj2Q4=u;E$CkTgb$0 z27qI@-yC4A+U4{2@=}FzMsL?B`>?U>iHs^oB!Ul^#L%rth`}+mT}}9)>!=k+lie2$ zc@d=|R9hh)^Jpq<*mX)Ol6ent(Rk;fpAutvykg>RIToM{X%hf%&eplsx;nmgaUyqm zz$L*u797Q<$!~jo5BWh&mZ`bl^3q<9!C*2~4t=K36Bn zG=E0R)J&^}PesUdGO98Xy|o}_bdj0=s=WR#jvBw|@dl$A0yWBmm8l22lP92+_-hlQ 
z3R*3}N+7$v-5`=_KN};44Z9hBDW2P`WLA6%rom^b3MsZ$NfS(wUv{+~mb_GxQ3Ktt z$KP8295AUYIML3tv%@$|GFe&+`ZMS_(@6H2!rh`n9k{pnSVDjGflt+kv6Zs5cBZh! z!jo&*83cMe^Wt5Hfz19{f6Sw4>ew>t0^Q9#MCxYOmHzTkj7IBc8+=y%RZA(^A&!)6 z$63c|IV~;xJ02YdPt(3HK7|aP9$LGV>R#N5Vk`M2i9_kAy`}PWd}+fvzGAQ_b-0YL zzx?P8?^5WHlT{+kG}Ud973Z6%=CA;DB5H5drO`}=uRFujZ3$$|H6AtQpdWj@F{}}N z!+T?;kG^HgX?oV<2JCsifv(WffeI^ez9AmzSoCoA8*NBsx zCQXI(4ppgh(aO$h``Nu{&p?-M-fB%L(dT*Z!P)An5viGz+l(UzHMX868Gl7+b@N`Z zwY~7UsBMF$A3fHExu_Su;#%btG2?RN}U}62;X|YHad(65UHg z#dG(XCfzMa10vm;9O=TCw8X6a#Gr%>XRXdw3|M;0%2eMLx_n2|VDTV-K6C~pA7eU> z)MMAjQ#pb}&K|r<@pyZxZB{YhjlJ^og)$|4lh~jwR^nDgdT*}Q9kpSax*@roA$(In zNMz>ZE}oL8w`=$>hw@2_jN|p|WOKUNJ>8w4Ks0>s=!qb!jbUczm1heZUNTQe4}Z>&zM`S6>Mc=|3f=}LvPiaT9p@2zR9FuGWBO~lB=`rZy|Cx{ z(=|iLvAHrxwG!Wh>MKeo-oiSXOJaC|ivazeB|Nb?v%HxTP~d#^QRma1cbm>Fj!e66 zJePcm4K33UsBU$R39q$~X*H1&!@S*-#iN!BI}hFloU2=#vHtA}$Z|DCE_`F|{ z=f3!+H6b5Z?m=k1iyga;W=9Icg)=M6uI}2~q0NS$fE7i=_$`6ZX)^!9L1QOlb14Ue z*+&%XED+bSbyXtvL8&=~|x-!}=|L(@qo)a|L05 zeK+n;$zKt#fY7^&(Rc5AWkW%)6er!z1*krtw?Phy1n5rSU0hck%p@m&N&e zJi3|@QuElHizLCkfVV5+(MtyU4I#>%JXx?9VW*p@araSdJeZfSQWTG}s!7jomif&s z0CBxf5@kL6wdMhAy4L4BR1?H*VIJu_CY$fz?#3K2UqD{cTZog#GP71&JEpfbl~{ac zEL@K@RWAv0oR%5C1DdRwZ`5hpc!N%Na^@xVlqciPxQQT1E7aMa5U27N6>-;tL|ksh zJU-aq&*Eaa%zxBq$G~TI!qNB3Kp1-(BAs$xoM#@NXm6on}R~+y_%m9d~0$KVB``l!4%Msd|yBOWfnXrG0EO zStTszhLsvE6dQ&#bClP(^V!MvrrkYoeCXKbVo)x2ZqeN`+B_v!FS|~M5zJS5+ID!U zv`y(mv#wmr7k(WM=VafDklbK}5tog?yxewRS_&k4*qr_Sfnf7;DQkoW?s~}8`Dl%U zveHB+8Tp7?f~E_lj0`%M1E;&w0*VnI^kP4*Z*mCUky4(MY7ygCZyr~xgyqEYizI%e z?TY1?_%z!o)I+(@o{UgYDE|g^+={?Wr`E@>iE{ocS&L&cI9@YWy>yKaE|&Qfp5%Z6PuYOgoSs&Co6a+3QD*r@_0wS3dw-Of`RAzk13IIBm7*8 z>_9pSp|g-b=XkO={m(f@fY=X~Xp`-6=nU3oPj{E4B|*XGN6GnCP@ z*m^Ik%+e4;Smw(Bs2V*!o%>^gNvv!w@}mp*etvR0L;Lq2GuSD0?qPyVxz$wULt1L1 z&#qpYI^1D#;fb^HS=wOm9)aFfl!~sA)Td3k_PUme z-inSYht;W07xjvi+DFxu2%?7}cc&)Q!#5iFOMH&mT%37pQ{K65>nWG10YF@6G#u3; z&cKk+-8s9_^_PRj$9H<_n|p_ZKPy}2UhbJfQIyw4i&3Y7=JO+IYOYO#YjY+Ob*D#P zrMErKqqH?D@Abs`4(9pA5>mV7qRvFl6W~{S4@hq4YR*jW0vAI 
z0@~6yYA=m^edc)VpnG?VVQRGjb(YNrMbnGX3MsUJ1^CJe276*z7iY}UI2;X z!KdXjKkLjEFvm*a-@D)Sep6C!5+wV<4Kn>sn(*;efGZAT={`LSf-JWV&ZH`HUKC|5 zOyuP7E{}~8N7&VSOrUSulZyMzsau%!vz=u-e$)E_Fx-s^JI}#E5dlMF-3CQfA#2O! z63ZtZg|En4I31r{0KYP#Oxs$!3qoqPqx2Cw{pIzznDV*?BqKSrl;PwzapVo5-VV$T03RW1_C&ak*t!TQRzbhp7V#}@brTsgq4I*hrpJ4Q|R(YcS z49(@48H3RoVopmL@@Y+M^hu_KbXPj9GR8KWfhBHI#xp-CKX<>m2h%+cv9Q}54#T|t zE(X(41}B1D;lWtC)de;@=zyq$2NDos$5q1 zx4;slpWjT-Spym+vGI7nd^B&6P)7E?sM>nKT_Lijw8=szsWL@poIU_>l6HAkz`O#j zZM5l)!Cn7>#nI>F6x5w%8B(sbkckL%912+dQsqg!uA-y5eBER0ZQyM*)5DRyL4}Hv zpqL(opD>{7FgMOR_2M+=rB|Ij;iH$gOnR;SROb5{D-jQEEA7c;ivCBqzKb9S{4O!D z6>(v0q(mocekNL#&i(_=T$La!1j7{V<==yNO_=0bKE}{Z>n_qyIM36#cV}QF7qtA0 zq}oW~T6V56K#;oWV74ww+Ek|o*aL5nEL4-RWMm2LZJ&qGnY`CXN&XCdOtTk7;?h?7 zw5#*^sR7bF7Uq^Uo-~SyWejy?^uQbCKX`y2X1}RF126D+HwI)};1}H6um+I$ zWd=eOo>U}AN_F94P*0)l{>wc)%q<8y%N?fNhTN}EyI}`fOYu_v&0alWNWWL(b| zu8VW%W>L1Q9T1yN-@&YQ0(E1sIf0{V6cUQw5*K^~#W`)c5Pa_MTH^EBgch79ERBi4 zQb!fx-K;ckh%XH`>qu)J=_3cp1E0Gd@u0m%MZ%7X!L#Mk*q;7T4?jScJ7X-Ku+-&An;susT#L z9>V=#jvhZd{mb~i95{$o@m}?z*vTT}e1pi93+U+8HGQd;8)mk0if&J>a?uV}VlGIN z$|tppRad3ptzRl%3F}5Do+@pMPj&!+RW&Ff6}3A!u*S}Y;XQE?)@oO8|MA0^7q-TA z;tBZc1;uxC!4ph2K`)2oowy;s%zJo4IiMHB_}FAJAm>*4k;?DD)H%oI+Sd)S97pir zzyyf1$!)&knyAB7_yLK^hhX#9I-#7rhbWA{diiHa!2f(FZcP5%NC1lQNshf1*jdq;NPp{!9ItFf7Sia60I4 z`iF<{)RYNqe`TGy$`>oaCl)@`hx~4!D!BJ7tBUX{^GK}WwZ*LEksiXwm2cxGhZ;HA z)lSX7i)5qt6Ln}4_kaSdRrHmmBRnFf(PZv+Y$T4feF=i+6Nwgo{O(YP4&h)cRU3qa@WX=B1GF}w zVVfwG(?Le|kIDm7Pf;hVHXHQPQKxMK23L7*nH4(&peY&zqVi zPSdT&skihedn_Y7kF0aT*t}BvgwYPn;-PKq~E|kootC9V0TPc{a$X zxI$@0Vj}UX(k&ZN$Wo5>y9i@9>tb_-WM6 zvlYf~nqpPikWgpV&wm)J4x%V5gAGE90|lUOn{gbYu+=xRfwAG=0hB)v-+!AZ-9HW?9&%WktrMDfZvWj0kqTqBF89w3e(Q>VJox_&QTo^OEv=dzw+x&(*cU|KL=SV* H;kW+)=R&Re diff --git a/docs/reference/transform/images/transform-check-config.png b/docs/reference/transform/images/transform-check-config.png index 46fad65b5d4073c4438e56f7d962e2e55ecea66d..916e7edbb9435a8392350cb11593bd7d17c9846b 100644 GIT binary patch literal 154179 
zcmeFZXH-*LyEaS_MYePo-g`uvihzyYw+c!N7il;WX(C(oOijd>%Lz;(7nq@$4y5;LBV+M zkKZ3rP@L+aprF~NJq`Sa)_>cBf`ZcVsix+GdzzXTA9%VsJax9Gpt$zd=Pg|8*24>J z@KlxrO`G1n^*d*@LiXOfKajr8{X2c(C4V&m=pU<_!IwI_4lc{mh(1(M9=ITLTmR-q zmaZ^fwTc<47PeO|z@Ck1?i$}2=A8~)H6ZUMum;7py1pz&Slr?lVqlf!r2R9( zmiOl6$Tynj&lY~m<$CDAc_ZP<6C(Yk7zQ-4we+Uok9vtAK8H5-%yVthT6e>wO}3>x zXnE-LI2aTopX{Br1DkzssG{MPUl#%2cOSeIR13tJCv!Y z6N7q25oUJklw`UPbuj27#1{gB6%~<8Y}hxM1|t!YN0cUl{c99|-$(!4X+(S+$(6 z)kr+pkIK=Ejtpm3tofAUn6fTX5sSocC2&-7exfL9S)52dzS<8askRWR##MXsa0G2{ zR0t1~(&A5+@KVzRZGX97yx-F?|5*Q`#2c&ca1Zk;2RbK`j{ky2HjxJHhj`*ICv&!U ze|w3~wP9&g0##Q}=UZ1E3EH^SsUfUjz;yn^n~_P>aJV{pi(ZTcUHA>JQ+7+b*Ru&` zQu!H_X}|Aw+U!oF^|9>=?2M~yoU%^d`=VuG;ir+wFO5`HYL{BUx4JV`Q?~<_13f6s zEI$uiKCwd_OEhv=T$~06+!v?Ya$)^w1Z?O+W(yO254XyS*ooUOK6v##zFoeTNga)s zm`V%Fu|}!o-3^;yfykze#Z%qNE zfGS>S{L^$Osc3+mPV<2Gw-ZBfR0>ldtn@U+F?vXbHM9$-Z&;7VwV|trjOrRkM(;Fl zop|CelZ&{)PPLH0=rp$)xz*L1X9NbFIL=8>7rlnOsk!s7BlO=N9CfH78)19%t88dE z=cg78(XQW>@ozqw4{FO$nt}J(0@#nQzD5ci@l9bJS9mGs8+(V$mMIZ^x=6=T9q=`) zX=lcdQ(Q-E%i&|}MOea1!kLt$82ENweyodBVt`%EB?a~BLP6aQ_gqB4e5P>aeVAKX z*wD(X>mSDl`Gq&^Q6ucjJ7reU)2@>K;H4BtsEN{#~(DyY?&?eEh&zW`sw^0I0WAF zRlltcEWZ%P<=FXk)#~`!rKEDv{mxJ(CvV4p;$&)ME7F|$|NNH5v!VGY1U2i)Q$ajo z0Xs5Tzd#QzEDap?ejnRt;ng{KN^knzsZ{I7qh(L0=gnQUAeceyWRZEL`xGU?P~AD} z*aN?O)G#e{%$pK{|eyT+K~1Z#QnQ&p4iRiERHbB(NFi@H&riKvaD5YVHtfx z*K?wq`ny$lNIN|tHYuNzT-SGQtWrs;jCtfu(D?@C&~VT~8#bT(@H%PGMxSe>S^R!6 z3KX-qm8*s~Z(Xi3)zNIO}$q_BD@u!?|Mg`D1gV2uoyjd`~%QpJdH7 zxsM%-EtcDi7HD*>U1e*H;~QH<`koj*MOel4o_VUWB&$$C3%>(vOkw?+5;%IoZ%nWd zwwr3RI|9nGudSl}AzC@9))9E)gt?$Kh|);gKW#=dIe2)NPrl>rhqB zJq-tl*Y3YQT%VpK@Y5z)cm}+Hb-qf_6084h1%`8LRM|r47rvS~J`YMNw%>Yksmk&) zNq$8?jCo7aq4S~cliSR zC-*q4gRckUd+!Fcp>+wne2s3nJ)OnL`k=tI>IG}scy*E;ezODG*!+@JGfOR`k&=Mr zk^AF${S*7;fv?l*PPsQ=!YNg-zfRpWhY-VCy5PBN21mDl@q0^(LmS74`#V+_+ct zm|Lx)b6Gq4ZA{(nC`Oj5jPvzGaEA-cm`K%QvJ66T-7h-A*gEJkv@2TRK)kSWQi+DT z3}0WayjCYXo?0iz9Ue3KOL4*KO~b1|{B(3wto&$6MZRoTJ(+Z7dY{LYAzo$QKh`Uf 
zNu=iIZ_}UN$LT%KEV>oV#OGuXuv3m~oHMpCobk`0hSV(f)irHhA(hiOZV-IsQNiZ^ z+oeysmHOoy5^J$;xcjZkFM430_&UOXUtVczbp?h<+)8%e$Ld|*e{uQFYudpHsRY}s zwBIzqPjT90qTo?0UZnGQo)BBK+Mci4U1HPJC=6{dT3tj;ShZsk&#aKgQqDM(vVlxi zchXc}4Fz6GaWOJ9G~5o}VKvmxzm>CI7+61kP=#JFGRLxz{HJfj7`mlapD5tQLYNsA zGI7z~^Ct8DrSSedJ!VP&vA{7t3~y!&I7Icah64JhEpRfiY)^*!c?H!lgGi#_N>#sW zc?4)r@4T^IJt-=!F*nVHZ888?PkgUaVw~mS`n=6?K&yfVcW1RE=4gx>cAjQb``NC^ z=6yRC!LG#R(Q>ehp;P*pQvq7K=gHiUc1)~~C^>lDTGk-{Ro|R8jO8$)Up1N4Z`9!% zrE}aiC#rL=eSYnwKMpU2ju?&e)S`;-9v$EPbB@x|J?QA2n1lo)3pMYhNBhyUGi}33 zT574*E7l>!rYl@t@mYbRTrdj;n;g5+XFf>Npv0R!vPdfq^qGT#Bn8-FD!8~Hw5pY) z1cs(W#MegPJ7J^0085-A=JdckV?UEH70!&2*>c{oE@C&bdfH;<#;VzF8?RbAq@>%$ zGPRa;bYq@czS`s9kUhwrEn4PfhF-68h7c zOE52mvcv+LIL}*pv?I@i~cj-5EY1 zXA`G5|HoL%_tpFYD5J4OfX}3lr=J>O1y(YjD!t~_xY2f1x-agY9$7x1fzNMJwbIE~ zIW%pp-fxS#VYwj4YCW1BwnOh?;Z9PtfrsGQ#T+DT8Y5;Ep>#+~h;7dDQspL)jP#cv z`m{JyP$p-B7d(n4RoTYVweg17 z^{zXko9|r;jc72bmP!d20zAp{JX;0d$!ZXr3J0^=&VY2l0vc9Fo;0`lnGJ1h6R78~ z3fYTXAHK{SRZH51+$-^RvYJ2uE^f66@JC4_MH_zpn>nObu|w;oQ<`HYvKtu}3+%bp7E6uJl zPd9@b%>a+)54_Thyd9J9v_F?k*$_Jet=n|!tr&BK`Sf~i{7#ITIL%XNy*@aku;seF znAIPO419ZDZ^%AftIjUQzzw`6`g}I`Xn-)q5jOh%t5M?x&FrfDC#7zIjJj5$&P(5n zHeE_gMU^DbwdZaNY$X_5(*uFVt==t|(uSodva5^_$ocT-4)LZW>tHT5JrX0Yf&>G% z%#~(rWVr#Cl$~R*IkpaydFixKdBUiX6rZA}-N!SIK3ofLbDEKR0R6FY_FYW(CRY>$XIbRqL?745TtsN`Vs!OxQ6Cox05d+)jjfq6GL(#tpk^VXP6JUZ`-!aWXDWk6Kz zAW*a4Dvv4)em2w)GVPW$rv^?xH)vpwUr&XNp&O|n#F^XEYmo6VgG!ekpBblA9n(IBl7%yIwN8S(F&HZBS{3D{Z1_`7_ zJeWuJK74ETvgt~iM@(+##h&1oy41?BH6=_0#;xk>qG55}W~%#+VG@U`^|-sCDR4^{vk3kV3tz$?}SczyeXQs`XgATF7R2LyUMsq3*OPv32;Da1m#sx$DR_ zc1BNy?N2SQ4_z6`c_;3L2cv5*-el^)QFZMxr%fZ_9^ ziwZ%y4-O*P>zYRvtXL8)eN(o}e=V~$5L|yB4$3-+Q6m6ZOd4ZPe)o-z`XN2_*k2Q! 
zV-1e%C>>$ni8wwmnXBco*Px+MSFN!F2-vC}&O>-*i2$~px-Bwim+C@c&u6U#?geP| zZjC#}g={?IKmYwIkK2b4=eO_;+4j+2OQ7_O&^)m?%SzXY#$-v`M)Jf)<$7L>SW%Jb z$8=#3?hd8rxg+lH;+xA4f;QjkmzFzQ0a3y`41~+ zL~5Q;nh}#pMq?{1aY~!(B}-R%BKO`SdCmx3>PvGwkRK1iQr~WZ;hzy!4$@Y<#7djd zBdg0ilJ2j5py%5WP{sM=%c=)6@#;~BndSIiWo^E3_n3G^{v41SeK3aZclnNCdF3V@ z!zIYXt>}#zp;^&g7EsJ;+05?rk7)klNs$ToOcy*FX$`rY?%lUxv-CNUZdJLAd1I#y zh+9?158{L9D_)+NL6n<{jy@>!xSV0)m@{gLb^)UPmL+&qvSw+^1@0w}O)>qhwE({I zqvq}u_s#518pnxRKW4}Q#=3D=d8Lnb0vhmASPcGYilHP`7UO5vE|^5y#usGp0`*wf zsQ92`=Mt#^JmTE1)7aOJ$w~RvjBcSWGg(-))%~MD49J?=8yR412QMJ=fv|lZ>r1ip ztOIf|ERf(LtmcVZUAopcJ9%HAY=ibwKryHP$bxLxE6ny>mL9}xtA2fM;Xo}e z*rthbh*|s!)UGRE2IQX@km%{tz1@8bGwm^Q>UHcR58vKpxS1%{BqsxUh5@jKiPdZS7--r^`W%W` zeg7I%H~qTr>Y(vFANZ%Ph4XJ>Z+f`z2OpHFeJ%|ZD4(cJp8#Vjy^i@OgD^R+>=s>D z*pAWk9w>{>gWb^fgTmHI#7w-)Ypu>rZjal&=B^}g#;xK}G4>|d8FGNzb1LfjML zw{|!Dc?$^*bp|{Oqx|>-V)?-M{sxajzh0|!u>s(U5X~{ura;Dg-(~XC&J4p3 ze2tyu{gpON=1C5td+Z3u;zz}6opTFnM@IXAMCGES5z{2SkUpVlVKN2NQ=?6?+{`O! z-0bM|96;l0;#B9qc%L?BC}p>B4>PTJ`id??*-zIfSw*^TI}6m0{j+5u)$pZ28)W6} znWhljq^0}3sVf1ACI1@id%+88wN!1?D^_p;TFssMF;6SJ~{%GUPG z(i6H!htsXLz2rHvihM`PORuXaO6QeFaci+Ivw=q^SWUJ1E`cpC$s8x7n|>jRSs=4` zK+$=)M8f{Q;S1?c^J#vQ5^noY3qo{LWv(~JkBb8lQV!(XD9APmh;1X!U2f1L zZ1u;hM~Ye2e8R;@&(P8Q(XvMF|MJjg@lB2m-Vb%i%Ft#g`7o#kZqpv@@6aEde~+)P ztT1WN3C*$&+3{IEe=y?)lkH3wHpW%p%{LH7=GL=GWzUseu2LX$0<`3f<4QhZNVEo?y;lw3?bvflYr`{TP0yDqZ1Mq5Q)ax?G5EEcT*C4=^z4%i8 ze_&%Y07#iaMdwI3^;hWk%Nij3NVbL1N2c^w(#y!gk}XT2r+?IY0GGlxIasn2$efr< zNBru4ru6Uk|DU(?Ux|gxe>UEK&eFeg%>Nx0FMEdEz-dIPAJ9Bu+9;`ZE&!ZrrhDp9 zAotMF0hCJZ(!H;*iPQQ?=os#b4y*!}kjTse0$eC9@S7JYR?I-%; z$>&s`)&81Kr%qB((oR<>eT=N}UFDw7ti8K9APMP8p(whT6n@hF&s~-99$vqql4by| zthp-PJC8*QL3KmZ6a9WMop=P`6j(TA_ZK95SE@{X9ae_(SundRlAwxW3Q;Yalg=;O zg62*`XGVXtNS=Jv-_8S@r!Yz%bb>5D#cd97!L95*s{iz@WF;4~dP0B0q^Rsa z$aNl20^5X+)6VkbwJ&gh5GMJWopkvI&wwjbr%gFd`ut__iy8&+jAJm?;VEUi2wZt~ zbLjTTLGIJj_AI;Qo!t5wG_IS4(vuJwFg{};Gwf$#q4QCh9y>9M~--gs&+{Pfq zYu!%T$!8kZsa$^3Ao$|f*H^r{L4@+}R0mBjg?N)Dx+gg0eJ!je9)5(dXU|cMT>aa5 
z{L7tOyO*ug&=}B?U>xl?83O$jm#=-|Bx+=U_HNw)JCD$z%;C!gZo% zt~>(}E%`5h1vP%i&)>#J@e~R;WQFXmZ?t6{Qq`*4nQjdwYc=3#tNK9uu~=?drMkU& z^Y$HedU}9jGx5tP-|PYjlfFrMpqtnGI-n6@)Mp~E>*qHDU3P8 za#15Yg=WBgt2!%c@0m$G4rCJ9TS5km?)GHSMhd zZ8Kf00Jt5eO8O+>I$p)Su)E*MKdr}M#iHcv+}~vI(VFRhV%UGJ|L8l7wBSfDK!MW5 zE#S3sb~^9j$8UskmtbOho=r?qMZvjH`_X0=3@$H=Z4EN@1ChfG(NkT0{d~1siBR_! zx_1tcA~j?9J!VU%qXj~L)>@_i@a?%2&B4(9q@{MPLFg<&m(CE52<-qnN1wFIrl&^j zp84(!8ryl?xtcS;1%QYX@lXqPVGcn`V^>97y0m8cLtg;)k8TLrx6_+&Z_+tL_5?z= z;U$Z|#Y7x}NzQe>rvvh!03xudB3yF9Bd{yY4oY)f7->=Ne?ACCR380{3eT zTJ!7Iw42{Jd#3-2^)xOoBVCm=rO6!KAx8G{MI8gEf$giYMOW7DN1bHxWMg{_1j_Vr z&EJ7W3ox2HH<%9FM6WsxjscWIW1p5d+i3>;)s?z_CYu5~^~3}tGs#1t!JBHU;Cio> z;nG7o^@~S6XJ-8&>l{Mh8kt!OTH&(-)C|t`>ed0K!!F2-{)5%Zv8%+uCLP#9!+xNK zFQK$83*p*ktb(n@759{bk=w(s+Hu{1ozi*=8?*g=2j*@Bxcec}rkkN;KQ{cY1NW~F zj{Jw1*zMGV-3cL;gKc;d#41oRL1}NGzlj@MY*jFt8MJ0K6HII!$+o-sd?7PP4Lknx zt@;Iqp51Y;N@ss|o|USZ-2ec8y=tB-QZf9eP2^k>y~mK5$j7_!UFmdyj?2}TAm8rQ zf|Rj!OuC-tmU+qzuKjxq4EbacV7j#IbP31ke&9^^Xj8e!X~al>7lRYgCDV7(@=z=> zm-_DsWpL}N&V4dlx)aCCwN{S}lldNqDXP>ArYZaIZAGeM)({KGbbQsju3$!BcYW1e z5z1J^prW);u1*@e6=bFp;aatqIMxRlp{9{*(91M`W<=3?>39sG!Af5SP`fpNszBbt zvsVCMZv>L0>eta)HkkoVr;WIp02KBu3n;&(q)1RJ{iV^e^lLt8FJCh)wc4N~M9blZ zzM+$?bH>r51TFkQPi@B=pmo~%OM|AeB8_{}m3*A6{MkVV9w>XW)m1ImV8>7Igb@t!ikdz>~va$$^CBuYlC)vFQseH%Sq)4m%^iGGpw>9)H7 zF4oYivGx#158fF>22{p&!-%g6B1!aO>_eu0;fLzGfQ8i)riNA~Uc(e%jSYBQ%5Ew= zCWpkFw$U;$|FH?kIA{+o=ZonvRopm_m5xJ+ENCPkkFPPqZ<4KnU*cl@DpY=4=Fl<% za;*BMT5N+r<{l0<(zcWp5fVb!ZeJVw*M9%k2S>HAZ)bac8(D&@3##D8w```ef5@In zvT|xKX>a9;1ijxXt=mCW_Cr!$bzDJl5@!4Lq-%9ZmBvu3=}gEtR|U!jxw7(2d5stp zOV~{XWx8*)Kngv^PekJsVe{v%#TY9{8^Ai{%&X>8)j_^nqzX5bnnlPU+mcB6I4hv7 z&F~wQ9QWK}td7>q)78lm-38q2xF5>Wrq-phtJcq9VJe)7K~Hth(Zp^#Qabv$4hcw# z938@Z+i~z*`7Kp*qAx(a>p6U71i)7xrbFK8#k84L5?&h>s;r`&s-n1^3=3KW_=Lsb z(k0Eh#I;JoiG7=@IaMl@CA=gz=3MPj5qkToj7P3nusuvR0k0h+THy!|ywZK^gqMCp zTSgoZZaU%N0WZe}1mtV02XfA 
zPM!USOQk?k!NweS_a8*q0RZ~g5g+ysAgE&tkOmm3DDcOg*#Fsc0MIF_eme8SjQnQ@a^%PsyUahs(vaZrO?bx5I!))8COK01vLpP@^BYM#94mtjoG;G2Y!oJ=}X)=(t zcS@G9@x)7h9SYeM%OUVu-XiSIB@LBTQ@Ef%eVuN5IMbiv;Gv-8x5lF6x6G;JvwRqS zSSN-X;PaNR9X~wT_NUOZx%pRaroB7m_&(1dvi*HtRB8L~c^<5}5!Hd-TA)wmi1)Ic zmSYCbcDIu4hDL^8e*K-mw5r*ySeyI9w7;k7AHa-DVG12083P}*ol{l4b)T0#wHfGU zZ7loR-v}sx=)HWgsaTf=* zsp`u;d8B)4XwX#DeD4A3=3Tq58IBc6@-9--;8C|}HGu2#MS{wiKf{jit+1iXJ=OYc z+cr`ox|o1(eOaG?r@Vh}|G;+1dDnF5V*$AzD1lqD9zEDdHlK{vn_Uqyc_h@rn{K zaB#gkwcd$Bgo_W(0*C)Tr+vsjfESg3HJSA0KIwD~oT-!T<9wj+SaEsFvWg?vWT z@s**ZN;m{BA|4LiJ${ZT!U7M$i+{v?1P{Rrn-4qx0A5T20}t$fdXj?qUq2=9!U)}E z+XDb$BPrG$>?#9#`3BmV)>!6{Kpc~~YxUD)ze%ed_?Ht1P?+2mz%6Ko501z01CdKK ztKNPHekt!^K&4g4jv0Vu2>{k9i(xnqBO$`m%;*|CyBtnT1&I_QlBJ*V1-I1n8 zTB2`NjoNv{;Y2fkLWWkwj_@6E}?A7707`AMpyOVF10r5-} zy_kaVg7%zCC~+fiWZn65f)Q5)N)n3uKQWqg(Ka04)>m z-bmpGHn51O} z%rgaH>+imhE}!(DDOw-JxJA8W6^h#GRu&d1`A!_!_n3i@30LYimoa~_roV4me$7-3 zjJVBVe#NN!A;&ZK`;@2mi1^XcyT zY9(2;8dbMM+UyW#m7DZ$@>a&Fd;ADn$b?8T@XK6z&8@1mz1puFFz43m-O`2M*tzvN zP+;a&7PNlX!C$tNn>67!{pOC-~?zC+$MwhD+WA?;(LnAM5a{tFp6 z)_7bKaSKNezCzDaSTUNP6NFRjGjE9>g##k_c6j`)V7_uU)TGUbebPO`_*ebBTk-u< zw(7)n+rVzzyP!MK3U{(}2%X#bpO zCM9X2?!DM^sWLEj74Nj#ucyw_KwQSm`pJoCr>Th|k_tc6%%_4Qc81Igrf?lj7PCpz z%xd^QI92@rm~@*OGROWTodyz(jFMkBgxSFSrH=ZSb!(*@=QHuZd#P!)Oen#)Jz=e2$K5W)0YPkBrcwGgc z9{aV{2DT2}lbs&C!ui$8irC#TT`{)C0N?9$1>^`!1))lHgtapxpy7Z8zQ%j)PRZf# z9()YkBUs`hwz^a=6PJw(jMUk#>ZI}(*X-A<_h`lsC~z=aK$>)>qcG541b-qmv&x5q zUDMo$46Rc3$b7ynpdNzP!hc%uP(;>laF5(tBA$pThD)Y<>|ek4qU?4}79nd~%{4XR zWgBnMqse;6{!?!=)zB|$W-WcCwr%U3^5IR4kb1?5cya)c|Mshiy^DA&KjH1dDeoGr zfPOL4*mH%qGtp1eUxcfcfQ3k$y(_Lo-Lq1CXZS@9tdu^A;;!4EJUf z#7i{Dh-ovj+Riq-pF*Va(VE-i!$K$SY}F)@`+%~8V^&V55?hPSYUeEC1ys)n37|S; zP{DdRtcK(pbtT7V9?~J{F2(@J&G)M@ByQBSNv-kqAKst}&y1<|%2x zwM=<;Q4*x|@p_ljuMeCG%?Eh5txow)-YxhctP34>xIZ1N=rwel@4Be zZ8gF;TV2u-U(7;w2EQR|2y28!OTUSv3Gp%y0P@S7Hg$&Ynt`JXjZGEJMWaIU4oE8! 
z4v68EQba2NIwf&rw)D6~^Hp@xBf8WG&#$<1Cp|qsWbT(yhN7R35Zwsnif1!25i^%} zZGI*quM$Jg)U0+VMtjfpd`U|J16Wun%kz$0@GF-Y7(1OAe-N=9Ku$2TW~~&AS=~+N@SMEjjg?(kOkWe6j|-z}03Ny^v05lj$i8 z-Qe_W-RNNKMhnAAty31_V7$yq_@5F$rQ2Xy^kD^)h3}rBX|0J(^_4GUb~B9qpqGS= zy*Rb~W(lW(*BVm%1x3x0uV*Y*3t-&oIjp@y^Xs~5(mp|KAIlUNDoDdZbQMU8 zCH=Z%gUO|yTcVYDP`_6C!>UWqdRRYlPsuy^t!XQg&i2~*j&Np@@Jcz-b;&n}-0W^2 zKjC*4kbXbjHLSxgKV`6LAZ}u0R9?c~?2frXowooX%+Dx4;o_=b{ER?Xdz#PAcWMBo zgH{2>vD@Ovi3_@ldsStt)eD)KqpEmc>oN?((pPxAYw>lV4uaW56-#Igk;GAlz~uj8JN|!(}LmcrE;;)hor+ zymqz1OW{TUn|MR9B2dXVyHn?<860K84?!=vBP64{HwTd`I+&7kpxO-2%?%sEYGqeL zE*_}kir@@w_1Zw`>m4YoW>s4R%kI3>D*nT|Gq$Z_)Dz}D`!?Dkyos}mxC5=U13j<| zTrzBb-AXUr*@U|%;rBy8tZPjmxQEhaA)}I6@HxfP)H7JMX zY<`r4(s1oZ3+qCWB2l{Gu8*=k z09*Q9kPqNCO_PS_|Lz6wZ?7I^bmPH>bPuxurP|M> zU~J*>&?In4@O_|k^$f;;W2e*PN!8|7w{73*EwZW0pkd$lJTNi^)w`S&;khyRQ+p4n zqLWHnLRC%%3f00F_-p5ZO6_JK{1QNKp!lk+hh4{tzWHEk&cP*pTN03V1#Qnt_A60C zIu1|x5>DMp@w7ZYz$WZ5WW=!HMBgptLJpo241iv4HD--h@|*{kNFtQ?zi9RY_2(~9 z%;vVS){)dqc$fL}Hp3;;Bde{$_=zw=6rbO?91l=c)#{dzohMO8|op8COWun{{*x9Y-n-MYx+wNPmn@@$ZA%BIeFT6u@-bCvzcIG07d>^!WB z{iv}k(rD?{0MlZ9$Jm%N(FNcQ>or_|J_-zIeZNIF=r$$*%4x~ktlHA8uCYh}vYX?- zUpXWE+C$DLJxtoYC%bOTvIaqX5Nh%IaT6o5TUo#lEmF9jh7`LAaSh=o5OH=Jo5L(&CA3 z4O|}%;*1##D#tzhx2<PGXr-HYFfPhEfFYWnajN2 zG$U#I>denlPoS#4aRt_$i4=qB1A8?$)b`+T(T2RLjn%yvQFjGGej6*SSkHx!(E^Zo81%_Ac4HodDBv)(eQ`^mOh#*F zP8MH1E9%&rcYu)Ju6xjLW=p&QA!q3`h!&P=HT9D;c!}D0W&iz|+q)jOc}z%3%r;gX z;s6_h3qvjNRvF6O3=H%%-Wz{T)cAB*DT{@^G2d06>KTy4ne40rFusS<`=b4gDrv5=VLQl~3%FYlUSYT}& zA04v|s-eDVqm7TPn7CE`^gX_)j9$D>W+D!%&}w{Ie#EY+&eUWOYB@$?ZkHdDvd@Wh zB4-W)#R>1ea)%Nn)qnq$Nh=E)?o^TYD$vhsJjgKpSZv+qgG63hyaZutkJRCBwlpoRvE z1JZ{16WEQG6;N5hm>b9lcWoyaIOSh)hDS-ZRO{^0z8CY5O{35@{5dj^$tk+|w2a@4 zgIeqUVdTR5A(#UG^%qcR@gX!?xOg*VuPU_8xNZT3tr0r#>%s2oHRzT5O;*+}Z>rd~ zb*6X96kro~7A3tNKI6Pm(Rv;}VK^r89gisQEXmi)LL+`W>KOLegH`hTPg%D=VKL^Y z(@uX`2~_C{URo1g+8e!9_?*Kr7sxK_G)>&<%fa$}x{fFXb1+bptjlr1f32FMF$HA#AP+NG`G|Ph zn8-oSIv^ilXVz^t&+P*x8ka&oOIS%4vnvJsnspz}feSCOZy}`pDoAfGH91)tdh}$c 
zT$%>*(VrDy9UdU6)P{2ELUp(Y95VA7AW7DtBcGlUk)HMgUwQ>mbvZM}n~bvmLf#KQgt4;%noY`( zbo-rMWo)i-99eHyw9+D}P)rdNt9-EDq$e9EI~bI-CxKx6!}be62>Q`Nm! z(Jdnne=&sl44F!6ZEW{<+gBF2HJ1k9r`wvQoC^Hs#gs}~*wc*M64%;Vc%}rr5oZ|*sNZ*Qb=LLP;t<)maAuI$=oPd}?f zWlHjh;Zgyth8}fQI_t^HgRT%k=}N28)z3**QhQg2|6E!EGQex&0SoF*2C+G#G?3&y zC$z4zYgR*z*~cf5{4b`RaxFqz45!oFPTTv!AT#LGhf%kCZ%b0N>h8F_3V!PHh?kaV zxt(v2d)T1jT>vAp2pq|R`P`pa9PygRRAhxJ*|H5jg7)_9ctMaG=I$xjANHAGp!^A_ z2-odiN!+dkwq9SS!4bhh7(3fK9f*kxlnn~-2QI$#Zte1D%ixw9Uq=G%3Th)oE5}9h zh6?3p{PKYSe|3FR82b6r2#^GDgdSja`o2G1?8%vilSAak$=@|$4G*ZDR?7N!r-3q9 zEhvli;J7W$@-NE&*0(8};QH;ac_cod&x=XLaB%-Sj&m;xbghDW1sZ|2C~vzat2c7Z zn{b!!Mv2OeT|p!!x3JM+iDT-w7?4j)L96ABlxJ4nyArw?!m1uE+b5AE)4eyjmfT#S z^~|(zWq0^tHhl#po~F9zFd;OJ>Q)2#_sqh|ZW41%(-o7i!}kYoq;$G(eg#EY`i)%Q zo1FNyWD>f+v~X{D*L_zxc;uC!b79T${gF}0DIVG7JfMUJS@PpFCX$9dSv{y&)~z{` z)px?aZG}9ktGc>UVz>!k2yHJIJjA8y05COs#&c5DjYRAo#BVlB4=$bSmX6xkn@x9baOzWYI5NIaRpn_ zBzMmrcgO!QwS9((f=@nLuPG@0=6M)r(r2a({@}c5;jwpn`WZE|Rzd6ctE2)e^sA2x zSrCWzs6nWH905}ZGg3I%e&3(+ycAM7z7M%w8?Yn@VVJ1xetU?hX+^kHO#Y^_i`QS` zM0szy0Oe({y#81kj>W7{YSAdo&&7>g){xiqtac7A6P_5pCD|F_?*JfByeIF}5_E>Y z%FSQSwB$0(X`WS2?LDrncCAJ~St`I2k8FycVDNF(Z*^2Q z<&tlx0zUfyRvwFb=C%VxtOnrXFveYHfB`Nkp-HBe6ozcDL@e^c#XNw&hbdG$S$h5ZR~#sM8ZkmUgd zUpTC9*Kft8HO;rL{9G0!xlIk*GC@PDJ{|1GS>Ka?nDXyT0x&^l2HBS1s4O=hSz|;s z;d~it7TCQSd5mV9fVx^RZsM>dL-#h14P$jc`oR`Z_!ZgSE|P>_HUaoutxU#&Hw0rt z!^r8TyI1IGg0s4)%)NVWHv$R&E!X;B5_ikZD~o9`N+u>3#W-24 zA+hiTe$~KF@PzU8eD{Lc&hc4Dt7dd4PZT2$`+Rc9Zpr#@^ny8~&Mo;#!75e|lg-XL zJVZ1QmnmPCOY!YPS2K2#kw;D?fD~sl84x79U-%Q4-t~|NM;cPO+?%RBXWQ470mmY+ z&XogO>brIuG(~#RF*Vg&JHmROv>ar#Hw(n>e!pPPa^$%;3 zdh24^Tp1X7A<1nB3{YA{n$cYGCWf-*rZ(fRj~s}@^~@$wrHJHdo}n~5V8aZ5)5>Fd z+Id|+cd#_l>ExPo1t*HPC zEJ;lITZTh@z-3e+K*(7_Ze}Wcy`i14tn3)8LJ~XH*a8q0c!woVH;V`*0!-VY{L@zY z!%5h-?OwW?u;DV2btm5K4tM3rz4|3y1L6K8vA+nM_hzplB&4|CZ1j+$yS0-JGk@XL zool2+50J`EqQ03Ym4w-;-;+z<_cI7ND5?f|7x-4VtW^L-@?E#mY8RZpuH!e}H+9=y zja=%l?;4SVcxI*^TgYp7!;%MIlLDS_s)x4D<|WSonDVai)N9*8hXypDKGy|$sDY)Z 
zU7j2=)tmgQAG{ram2-gzMtc$kV%3fxuY}7$L#lzK!&;@cK)GV*?%7NpQZcTApB?7{dL0sbB_MhF$EI9#0w(nO~ znG`?N7L1(-W1^O)OauPl7g42U2dShz zXacJ9iObuo($+A7>@K5u$TKxs@e2TI;Jcq{`qAH=XgP_kbM-~J9=`@)~ zg#g4nsqs~ZczNAgJ1ef zZ40ngu^`{ShxE5n!_h3Pw0+DKQa^_sN#otDt#5)FSxVLl$OC<9`rUD=%#jP^Ei)y} z)qNf;11Qs18&=;{y&|O*f!BM=M^&U|1GH(=GI3b?40haE?7bZ1Tx`c}o1-*B*WUU) zEW8oW!E!_~@vx6pVp;ZgFg!z6;7$ot@YcLhLFvH(?s8KBw1NFJBR}t9=bMt&WjLJ| zLh>%4Pfp31JnX#yNP3l_s}W~e8#(TM$BislNBq^R(A0lAvN080IGHV~Nq%GCmlCN{F{TS_gRstb&gOz=&Kvz)4 z81V}C8r79CY~wS^aG>?BSJYghNnr0uTwJc^TG23AQ zHHKCL8XPfpv=9uv)n+U@0~4#B{S#=cOLYFYf1pN2V|b7b_ES;?ndWa`LBg z!(nS2bGK?qUSB+d%gt}n18ntdcU!tU>SLUm*q&ru2bjl?-O}!@dkb4q1ZU0l<41Ih zt{M9ozS}ysqyy-ZOP@pa95Szo@w?CF5Tf7f)?U<3R*Im!ottDk)zN>{s5 z<>Rjuz_*ov1X;-UlZt}`=*(Rj9u7Z^7L|@}yh|p?j3*$RJ3inK%E?;AtXk-qDuiy9In3E4Z+p-CCVvH>X7sEvvcl&-Rl2bx#x4dvS%wQb{Wv%Tg&mVt}HzQ4_$pX3kk*HgOzsP9z<^%4d{3E!4=%Db9P zA1kj^nfI=X7{RyP`~(YvGZT#AY&1CLr(?S`b52uVt2lx$)6_m^8KJ|kJXvDmV zp`>n{B&=kSI-fcmKi!U}!@|O&qVP5(6C<a4lvo-3HpOA274AHq zTo=j>YH2x-tu~Mf5;<*J+MHs~VI69T4?qli!}lR-2P*{tWuI053TN){wWS~290)W) zDQGYfJw<#vu;~8(*n7`tIJ>rgIE09ZG)Y{Dgt!n9C2DkuC{Yulw?r9bbkU6x5fMT3 zULs1AQD-oE8d1hzn9)ZcZ3u&57~V7Y^ZeJka>rWlhyRy%z3cj7Ew-6+pL_4)*uSIw z4&&;Z_B^stI44v0y#jQSx^>2p-MV~R<}Iv5j6!AQ)?`3Z%*{`y&)nwF)xTV+@#Ox) zGT$<~ZlAJSJ1_H|Qaw>j58-$t7B`ts5^PQ>2wWFkdG$ylO{;pu)`u8e0q(Rh0R@S6 zy0}c{RgetUQL=^7LUPG3%uhN`<#JlwAL2UilMJd8OJd^H`2i5nSeK~v!poN`r@7_; zXUh9Pzgbn-?&7SD?dTRu_KJCpir3QcG*zdKV=*JUwAljXiY0uY=B2gO)?yT&$7gEV zxv=w-;zL6GKfWkOX+FkwpTWRDqq+BS-QbFrnc?_HG#3<4Q2q0VnhTZlnte)jA-$Dw zK_Eh;+UdI^SfBA743S@B&ru{^`2yA}i|{m#g0+uE6FZ5{sP=O0|2 zz-x&9ypN(m@}qJTg#54RZ2KBj+jWDSi^6!e{AW*=8n66?Zh}w3nN5)n3T_S2$!q2^ zKdz)$o@K}Pm~)(bePG|5EkQfP3R!~ZubhDT!%H?-MRhtCkf!-gBLkZ~XdVA@>;63D z`GrCOSyYOExuE2DUz@+Y*RmKY(79S_vuK*9GdkFFe6*op+qI#Brlxoq68X)oT_ z*{Z$W({OYckpE~_1>;FLx?@;AE&?I1>y{ZkDSrMy>J0U1j*t`o{E=VQ_Tj6loRHDz z6TKc$H0t4=M}eH@d3O1D4n=ghT!Yuo-kR=@v9iyGywMo{-k+K&*yaUw7Mm~k8HdH1xD;!K28OKx((|apdm3hDnXBUn zjdqz+RDJVDs}o>oVR4n{H85uNs*Z0V|F`yb_3mV3Gp0&H?8ts>(TXQ^3?5J?zLF6{ 
z;ci@dTA>^TTF}x}NuL}aCRbtc7k@iJzYdhHfnpd(qfOUkOV940>x)k^WJbRaW<7cg zQ%P~nv2M`KmPPcOpcS7-wO-A*>S21xiFx@fbjW*GE%3x}``MtW3VQ_*mr6|C0~tnm z;7KtkXA0`9=9FnyGQx;$eYk2i`~BmLH<(Xr6Lc9arF)$V0%Nj`0B=icq%q#*56r-4rl|U0^p)eEHi3|JrXp{@jlyqqf+!;7ja&y4az9 zal*`4pdLQumc^h&=2X?w&PxU%wYFn1G~Exfs{DxFC?0NgnjoZ=GJ|PHQ_w6AqWn-l z+xf*!Qmd9`Gbo+cxcZwvC`(~I>(X*AZFh1-ys}1eM$5F1(MjHWVQQ$?OTIVF>FoJ6 zMY~;M<>Kt^7n)jnYx|^I)#6?$)f?QhoJ2%ow*d7KUtce}xfXE47l{?tQl%csS#M#x zMIl3MhYW}&k-YzXSUKL8QTq|+Hdcdcmg-7lDuza@ECqt=Y?`Y> z7LvawsH1~Idxh+LPk}UlsmaI?OEyiHh*MAEPrE2~yGtKCSSP*hW6{?IN~<}rJ$1IP zWiZory%i~!MO<}&V)h=!u5CWX$82`osIlpv`7RQC_#mxyYUXjRMzUZ6v*kg>*~O#i zW|r7L--?=gzNd|fRnZ|89yALownR0)PHt7|D|~OFo_vevZV+{)BA3OogpAL6KV4}s zEdP~VLu@>E)JmAw>S!YR9KI|qCfIvDa=q~Ou>9n;44%}3xu|v7>5(D0P)g~t8E1he z&Yg8&X|IWRi026yNq^S3CBZdu${t|9OulQ~5m1ip7HZ~*^;((n3!3>BNi&@$c*pT| z!ce~`iGXv3%0S3xmj*ZPqr9azh^bW;C`gWdN#NjF=2E2iwrckdL1u2_x{lvV7@uA+ zlblMr2YGqp+JDJe=p56VXyhI768&nGp-geRmcdffj;Gap%s**)Ddff>nKUDOKQ+2@7T1yu@F}7npGkMWUyqQ=@z=r@dlumL zT+xLZM~7jW!(QsWnNnO_s?o`@@`pV&<5G7?C_mj|buB*2s<(rPhHQXg9bHqO??5zz zJ6=I~5|{*dcqTRjdK2jXc^CahlVr3xLudIL7u_L0H?Z07e~|8Z&1tfy=|)MUH61p9 z0}ZRP!OoT38iqVZwf;zdW7{BDq7XhyBQ@?cbWd711oN7qa50xI{LGa~19lOo#7@4M zOHL9_-~K`!c4a(~O3;P9d~co!=@=mbRUGS|Sll>R)f=Fpg&&_gsue(D9rj!+u(s$0 zq;A`wV1`0)=vg-Tn^W76Lj^fo%qk+=*>Y(+eNo4%0COcJ0J^D$?U*ay@lp3f+l}Ta zYuF4f=E!xswnE`Hy_qA#%s*c5f*MPxZXuryu~4$}n`M#dbM(>Vo{`D;fSRNE6LXLG zF1~$D#_|!*!+4gO1;2BjR=$z~ykP6bKi41PTw7Dh&rjG5oskLH$t*KMn2^uMpsvHh z4*jv1jnRjyYp`yighJzKv%=KBP>U%$BDfB3XOk{w%k$MLvj+#IR36~cLY_>10lIV7 zMLL=kLA{6FqjTw{j*li~ndvltPUr}uye>a+<|(0?M$Pi1bv-J8K9Tc!9I9W6?Y3Lk z{#cMdc0#LEOP(^wlzs884`EWkwPm{;-{2^!rR7HmLDsZs+AG-O&a-gT)j8lZ328xa zZ1*Oa&DB49jA^9WHsSn!6R&xLcO7V{rBxJ;4b(^*D98t=L_q9_|9#ehJ4G!Q>5MEh z9JrtbHi^D2(@EZd_!00LVO5U$LPYa9zw(c6Yr_cBmIF|wUAE`U2yY|LVuhxX5Ok<; zbMt$|r& zG+UR>N8TR*xXH&JH4BZB8a-*Warj66RZpAT)18%4rGRHxVg=7fzBV^uLK>HsTIvLm z-BVdWwW)UAAkUNcx`zyh;#vB*j(+W#4U5|q4x+n1Z*c${)6yM`ef0Q|0VOlI%q2@!LoI{lDd)(f>kRF>uQ_& 
zON-2mOJA?tH@Em$B8!>7&u`{Y{s~@}?OT>`51cX54WB`Qk02^AiA-||nh}=Z*^Kg< z2V&5X!tWzAPr=!8+q+*Mu@cbo*C9@$`5$E6*0ZuHBhzW8?94SQjT)XFY;>fKdM+Vs z>Yu6m%iou)tFON|(qbKvdCy(nxb58j)a<61%KR1;a{>gpRncXDJ{b;BM6Rwoz+1x&zHjg z{UVCDuk@WGi(gh(?{7mIf4nmcanPy;E_}Mi20JyU_9m^#8Tb~CEq%&hxO(S3n`_hOcj+u#OuX7x2|7r@(Wf=O8&N|7ggx1tzvImR zz|Q>hRMcV$*s6}%37e$>+s%$1W6XenX&jQ09@=|`Gq#5L(_ zxT91ob#}f?=%>FPaugd9<~kFq0|0{7IJt<+BJ7*on8;_kma&A3|7C6b`t>G%8wYjV zwq|9W(@1r=*y;N(*@gc+^}uA2XKXZURC`0Yj#&kc_do@zJzq$V#(!b)|5+n*Dry)k zfN;;_zpod-{0Y4bMDR4IY%Hk%5H;YTgfka%3nsuFamPJ1KjZ|J$r%pfr9a>YFhvK$ zkP`v~3z?WdRT=QTv^1ytbWfzE9J3%7)Ea?zaUhMl&mH%@d>rrjXf3!#EM~rhGk9zc z8cTWI*6z)e08>KEX+bky%JZE~SK?iBi@%TRZhQWD3_>-ya+5N8 z(`f{HY3%K0db`p@@pGxgi!TmrGUvJzkR@^64l#H9EX1{F?NwvaP3$_fjW4lSi>ObI z8$+k=_}LHh|I-5d-$M|hKn1l0={t&C$?#AsqaWz){=mgMe}Eu*Oij6WX7QKmvmtS{ z4(=A|EDxt%JrXATyklBx#FwfdGi_|rG`5 z(byMhiMNGC=$I_{b9&&r;>%?UHH0LY@u7ccx_++Qu+#P+>j?W%T5K`e4o$0=e!4Fv zLjq={GCl~(@aH^s1B>-I4110EzxKDR=w#kYkUlT+?W6iORl2vj&*sy>Bn1NT-Jd4P zX^!{)v@jl%>u9?4tH>8(V~o1m3cvSy z>yj=u-Y_pY->04Vu3|YwifKA+zrcp?&!v9vPh%x&SADJ>_zWs8Jm~LYc|{rP*Y}4m zX%K4qDg;fB5z7>{wqB1_C;TOLm4_A*z{Hq)rl?HpTUg>&klKGvE%frm%9?93V~^sP zwF;RZ0WTjjoXc&k5^(!a>gZU_Xh9{@{pft8HZXtPb?3G?&SMnELm-T8B4%dW`==2L zNU0N`H!7EgFC3>CSOE#no4)NCbDY3EXSk3XuGR7SxUqg99pfsaVVdW|-(4^NQ3a<> z0j=RAEpgn~8-T_HW?$cx$@S+3geVRJO5-`Yiu1TJF+k5Nf7M^O_NU}g(R{hep~#RC z!+6|S9#HWtz*;VXfAhylKFx8V7FfIC7f#2HF#^-+cT18|{X>QTm0L-By6;P&wbF58 z_JBH$dR^Khb=;o55&+gtf}QQSv46Mn->u~MS1SLN%Kr-m8~XG{uK6zMcBX=>j`COA z>0=aTC`@3g(*x-HHO6ZdBM)3#ckgaQ`0lJC#A1>3$6Ry$H=o%_-Mvd=-x-9gv5Bd& z;1XO5tZuz3bj)qUae)Q6vm%j~a|jPX6)7O0;+9h;PEI@;JTa{HDGTSO_5W1Tp~5t) zcX&0Dcb$0Ondhf#=|V!1w=L)Nicnn$&#kaz$F*zcX#TvwPIKh4#DJtsR-Jg;N*t`b zflE@vw(C~5yxY5eeL92JwBa6B>|(Bi0PMi8#&inEk&5F|)#(mr5W{OS z`*86737nQ5DAuHZRn_i{WZFo|0lp;lE+Mw9n3)&3HIpo73X%$tC@l{4R@cs!wI1zn zHx@@*S)&it&MnQDe|hxBwkQss`Sv|a)Z8^IkpVx;FFW6xr2ii*horGTlPzO-wKG#{ z0mtBBaqhY_CxA26l81XPffvB3$rkYdtPOL&{>RP!{*VU9;}vpu$Iri`@72zUOduYe 
zJ4ofU4&?t*W;|dRULk)=_snY-g+<8_~@Sayez+656UE(3jhpy&}OB7+;bO!`tGO}a4*X-mc;2-zhDsc)zV|Ha-a;zWXs&M_pf7~ z$w33apbM2oSB@Ja|3YQ!>7MO>><=JroB#|85BXkl+*m)*=JP59n<=Vt94h+-gJ|v; zC?7NS1}JxiEjBJ>-aQU^od#eK=d-HY$Bk{#1E>rqj(_*3P*c%70ALUURn##8Peqdl zBmfGK%cbnz9f!(Z0&92LsO;@=V~}5{%m*oDecX2ayOsZL<-bz-uT=i4VE=!hV0+Sc ze*PaWfd6qa{=2RJFWahEcmdP=cWJp<9WlO1Q0UkTlqGaI<6DL>=m zZc-TM`(8c4*C9yPcM# zFb5`w7wG6>%F}!De`q?jFlvXJiW&FZtDVMSe9FEuKFb_HzJo>_c1C%ewl`aj|7-B> zlgZuIx;cOjpWa{+$gW#N9ndn&k0U&3?CD?Wdbg9e*=#l|^bT6=hf4J8<%4Wz?6MVt zEy$2D%bA&(OJxN)s$@jsHQV-gY1LNq>aC3~1NCuF%gs91ML@rs)cri~0Y+0GC#kc3 zqc#5r9OwqT1H?*iX6;681sy5vtrCTpEc?schxEN(n#3l>>paUUX4%Dzt?xlX>DF3P zP9Q5Z4|)K?h94`gO)PHF-p=dq%@7{6X4ltVI-rB=UdjZIrQCV)@&xk6S>ivHJI8I> zfrlYilMH#fbo|L%EcYYq)V=hxMxO|_FR6ZP>osq*>4?b+(rNZ6P>+}KnNyN%6>8V~ zZ8bTvfIvzCks;GSXFW=&^$fYsbM6P{e0K_iKg8f6NAu}-tlKqB6m8emqL1*yNx%Kz zpp?5zzpH%?F)Eemv&bm8p5hb`mlm;Glf@bi^l8#v?DJ6idI|`Gp2BV$zfpwIXUapQ zN43+4znl5-S|P0VohsciU$6A_OH6bHX9uWcFZNHIL5ew{Z~U45$?@Lo*Y<-w_nmc{ zd^a94l7l9MrlgS>;z@R);a)J?yln0FFjHR@!Z!6Gj7Gl--%J{Z67}we{7gTX7EmV zT>ax}Zfm7x7W`3v&ZirJE}BagYDsdUJ@Z=G)8j}!Z4g>x)0?Je0+AFegXC{V1G|64%e{{Vwi9g7u5!iBoCnW9f6|C!fAbI}L6Ev@3QOx-+MoF>L zzCSV~UE}!XOZQ z)O>rYTgb9Xz{Hg`_CsApVoC)8dnxT7+jH$|bFD>AF-cIAX4`OkHiP zuqcVX@4!rpu^mudIMk)PNJR~kER0XkCykdeYvSXGLozw&B&+A-Ks!yNq6AG*OMMc5Yn6LHKvh+keS+?2Cs^$QyM_pJ!+0}US8Af z-p(@yfO0Q*GuXB1Ho(*Vv#ch7bbSv+6zT+0AOjXgm}>B5XDtp%?cmL+FcMcLuA+CNdXTO>_S zNC0yG+{GOQ=DJ^_uR>kUOUt4m&f~C0*H;c-!N9>4*Q7ke`qs?xU}(K+ zquiRLEOFv-SSol$G7%tx-4|5U?FXO-m{vulKu)7g z#wKtVF4qH`>HTm!|Ff*sOXP+umm4nh+w$%KSw}nm6Ry4sjnU-*BbcR84cs6(w9mz6!-SeeFIR_{PD|f~{RaPvaqj@rUuom(H(OESubc*8;a8SVF=a1l z!&DZcqtm<2a|LZS>fqAI@=Y~_iGjW8^2pbAz0Tr)wQSeQ%x_Ch!W_Ryr4;Ne0fZJt z_!YM7z3X-9J|aiOeMAKFz_=7Sn*sF?->#Y~^{A7ck|<+a3}NzlxC84uZiB$DHIHA` zecQ>pn!=PD?mMIseLW#}!8q64o}+(d2zwF(Vt{3Nwn&hig0amWTlR~bqkxzI$zhb` zaq~R1PWtHm0Me;*0+WF&=6kyds7n(%xmUKLyiJ+TI4Y)QDrbNv2gCf7+Dbw5OPzSiuotzUSG6hf)U zGg)Sg<_R_JzD11>?!O=aH4kO<-yZON)tpMsBt5y+@ulInqxYmu@?ni*65^HpD?4Vw 
zDj&6&>CGP<&m6ag8(GD61JLtj@$uIm+@mR2(TA+r;tNmApeI$Cms z*>Y#Q@lIkInU#dYJ%LG?d0d9V2S3byeNK0K=kAib_tNOZazIZu^Q!eNQskJtI1MA* z9{OFs)G(qUt!c|cNb1F++mA8Yn$VqztC~zEI7sxadp=O02e8u^i_1dl7`71-aYHSO z)Jgb5qLmuFmzsdz|L_&svP&(XqeUG0J986Y9O4;+T<+bdWc;Tc`;B3k)1*AU0LqgV z`T*#)T)3hVfE3+7nS*iGgs};}QGA+dzwjNz7e*X*$mH)) zn!Y;wU~pv8uk3lezdVY?poZl6fjhQP{Eg)u1$+PK^=L;xnbvk zL5R23q{JlK$g@$At`sEA@z9aqwUv^MQyMUN9!v7=S{K3hl<1cjh1vX#DVpFizrn=r z^tH}j&!VwH2xq(FIYAOAJQ5-p3RGd2oYNJH@Oh-T9c9(~|FCmI2YH``+lQ&Bz11tgD>|2*IQJ~>0)J7su$n~*!||Zc??E?yNHF_ zAnKIMs8}^BBF6|tq11qoto_>aN&>9C4*1QHYP$pmkXRScd`cU}Qi72vHL7}v)j+0; zcdQ_Yk*DZx>?}D)Kd?uw@2%F|X zD$;g-(FP@2lDtRKLtw-@6Wq1GDNX#$$JfHGnwfv2b^<4T#8IAd+7lHqeJtAUjXEzU za$@2~JI%f}`FV7|437$1X3`=}H0U!hEsyhzAmP{;yuu=}YDI&`2W^ztQc_PdYdE7sg804Xm#S75EWNWlQbrJxJyRP2nWf>;7|3 zP=UC}yVmXDS8}<%b9P%|TJfx}nq03DF7mOZK4E8*XqsG3oy3uMn(VTJ9%MrntQZ9* z)?Tx*Hn`NeTW{`YNp5ybdl2BvO3&EgPuAI;y_c^}5=Mxzmjz6|$&2$VvfiChMS0@r z;&?s~D$0$T!_jUr>g0|{H1wBpIT-k7btS#Vr^J*J<)J;ZPSIJPsxpx;n zm02|N^xa(eXWUfr_JzJ@WkYS#A|Hql51Vct7TmN8!c9?QdZSp^yk-8)dWx%Dd=Hoavy+b{L~A}@X@`t5Na!Lvj_ zJ-5Ocn9up1Zd6*lY~hnfHF32mhz zMcYyblMJ)@0_LOHE7>CzA3qPTR2*T0mPzj2$pRKcxPf)9{1B{jfl2N<(FC_Qw_LjX z187iA$ed5Z9G-%NHGK{vo0160T439~0ou)ubvEBLY`YSZP2lCi;vuYRwN4{Vwb3?`cC>9#$65MBrL3 zRH;i{@U{F^(>ebwk}3Q9=;f!uVz-{Oe*1C{^_o-LzD6$?{%UpN1z=YO46J_~^O#ug zYCYO%%xVQH#CZ>QJ`l^rwY-V*-FTmkD~5wxGM080*6D%E;yZ?pn6fq3T5*)+X;!!S z3(Vs_>vB|e_5FkIJoy{77{%<;WF*}zouQ_6Y8UQF*9|#9KQn)#DG4tUKS9K!gs$99 zWM-UR#x_cP78J8FZwU-v{E_X#JxHn^Yl#P^or?3%4pq+;9<>NQ3O%{=_wQv}is{U` z^mcDj9-oZ9V|*Yvrs-rTeY&k-d&QR0uw9Kgu!p7t&y5hbva+fu=-!TMG37IFfQ`-2 zbi?*?uKVQXiTh+y?mA2rLX?Ii;QB5NOQ-k@VD`CMTq3~$6>a(cFH@tuyP~l?du6lv zh{h6pi;omZXBn*4|~<5`%N!Ts-qLl2i#ZzxVQ zYb*Fr*%H4@UpVm+0|_8;8{vFjOW0vZ8`7e~?b5`0$iDYTlOOoyD7a;;pa2iJZoK5i zbz?wT<^0vZWs&L@v=xWZ*)%N?HTUm#Kgmz^dS{Sh56Z?o_;@6`-mMa{K!{tcQBR`A zemMi%Se5eBvU~>UA9d%2SXo zDM~gmG!t|l9S4c$5TW7tmZGU-M+M{1w-Sa!g{3aa;J+MxFn_A3n~0=&(F=+2%(+)r zS9Ef_%1-F~Rw`e@KH}7*=YpfnX*#iLY21i8uY={Suy|uid8m&Vx7Q%Nuo7PK&vpgT3e(KyJ^Nazr 
zAvUPg0zx>-cTWHrh@zr2Zi>#!+zR!qP!21jEEsrpJ0JC82bLbMG2xuiv&3&-x*wyY z1`kgtjT{|P_L<6BKu8k=faN#8L{&Ug_ANZoIW#CuB`Erv+e(@C(knZjTfnY-+Wz>_ z-7&XNykG1g+KX=k2my4y;I+>Ic_D8FdJ{@*{L?PuoD3<>fBu-_630DW4olu~@cM@PZ63 zw2C`EP3ab@cpl`~1<(m!!t8(9R{k%i)hoVd;dZ3D1nWj0BY00)ZVwHp<@>pQm{gHI z6VeIo|06aYs(2r#|8ZH0%0$F$4O;kmsD2|Wnk!|ZIQS4_*m%>DJE za!|_XQUFlAY^qSvx?|b@M8@#2d9*~o)}~W;&v|s~q2-JI8SZNTZPsGl0*}0vQ#B7R zri5thf0KTuLOb~6@<{*@<1s!4R*){*Q6VCi&=jUJkhDGBk>Hxd|f}YmY4=ew$kMpP-kD=H=zn60$Wu__O_DZfvS=R2aqf7v9Xx z@}!t(j;1wZCT~A`AQr3~_6cSLc#$~C=&glz%ry9d_~fAGKG4ZRIm7AHZNN_!X%u3R z=ewaOWR1KbwQT1%H{DBx?Vook$d^)3U%qf_%>xQxjEhxaj&t4whURUUo)(++}EujE>9@N(r_dMqFbql(w z1?}#4of-GF{u&~5Z>A&em#d>3yAp9xp;Jd8`ZD{TsOw}CAPaN7S!4b|&J*B=>)b%y zk_+RmG`o!aqNFUE(IDeKouHSL|JGuGJdMST-yBfzwr~cKDh;~Q5m5OIx0bJjSvB+R zYg~)^X2T}s<)CUI$!87GBY&fPNOb3~Z)VN=9ctgC6e0-N zngpBl-Tig_fqja65UJ()R~7U%csOn$E^_D$zcY1(T5{vMi_m%tDHLb!ScxeB`|K@u zqy7eC?qqvu=g4zSTn~_a_Op<`)-|}u4#FeYtca+#E!atdKg^)o=LZZ6ixvXb@IbsV zOO&MJw=i5L1 z`Ij$Ur~>kEVr0i|Kyu_?*1-_3fgmH-<3Lc2K~kwczn&n^~kylL#xO2C9rrML7Z z7PqEZH*0JKzD>PPcerPl^X`Vi{p3653;usKSgJ2ys>f$HQlGMCEi=be#x-qjG8ptF zD+J5$1q?Z+NxHV2y(aeeRHXgUD@jR*8RIpK0xnAE(D2NaJNA<(__uS_FdYEA_zO(A zkI2Lf1|S0WHg^P>WS93hOTf#f$dc{d+lJ*%jtg?}&*VV=^m{8xU;Xj^{^)@)Pbcpu zPLq9lbbM0|tmz~*%%Nr0IW70wr5cJls-4M3QhBz#ackP@6&Dc&TCq-hVBs7 z#E`&4?o#-URoLXCZLE8S>BNu%^O73ZpKIV7Lci|@xrYBNG4g#GO z_JsR|CJ!xE7I-#38j(%Ci}6F^S*7Q%Z<#+lXFaI%RT&KDR*j5|jB9}?q^L+vyw9Il z*|7}TD7NYOG4}VP_nr0bKrn4?pY!C)b8*z{Z%^5NjbPjmE3L)y+o#(HfhN1B6M>@G z3%wvt=Z_>Z&=}>?mIS|+{#o&zXr5OV{q@<_wk@i>3}GG{s0M8Tj2>Xpc0L62Iq22f<@j}(Se|5pP9&s9jct22;)o0MfzEowy#{vE z4-aVPBc_$+>^y@ftW~TS3uQ6TsGb~24f$2H>E_=aD2H4!U>-sHK@&UB=i~X2)&&6T ziA$++U|M!JtT-WGzx!(WevAwoz9BRGw--D?eMl>k+CL>Y(HCe~~1}1?>D-n`pEJDug|LPH}do@fTgb^&WQyT9V#& z$Mg1I%R8$hH4Ch(l`;CU#PL1PO9^*X-d^E%ELXm`B5l8@@@Su$S0@{$@)Ew*Eq9dq zOghX4nXdnqSxE4E)DJx?W*VzKxLtZ7y@>nqZ^Uk@T#z@V?6D|>N-wk5s zR|Nu-ulax6{inwHKN0P}OBHpgVV@knq?H+lWc|~M|3CkcZwh1$zX))Uy8HWS{?GG5 
z)qd4$J&1!G_uc<~5&-M}S1V-m_M7%F9rfvag-|Zln9j{Br8bmyq{rqQAK%l;kntBv zkj@+hSF((8?_xZ|>Ns2#=)uiB-<^b*>-a*&kqg98&E9XbNoY*UA_^XVpw2zu4`HaI z{$J19!&1fcbA2)F3Y?IA#$*P(kea6EJK0HSf=%ygkUBX6*O3#peRCP1qkeR07a(=`4`I^{b7>sE;Sl2HqkRo6Hr{#H4_mA8 z>E%y^I|f|IJjV4KA#lfNUBCPQ^xMx)?b81%$rlPk0!A+{XzMxF_yr3ji-RToEnNEJ z=XFWT{BkIY*5Zxm;0hBvfw$4o&2v~(SwyD~D-*W@E2 z|07U}=}c;9s?m38p3QB+N7gQRRO#4L#Z#SA>#AK>nerSIH-?H_i(YcQ?NyqRHfvg2 z!MFk(Ov-1N69vyt2ozGW$KHU><2!DmWjfz!^g){O+H_xIRXS) zIA<@~rAfO`JHvqz)Kc}FVk4IggL#?_3=GW}T~srtrr0^fQ&e?$?|rh`tL}t)sjHQj zIgm>oUJ|WN{pYZW?eoC{P!|0+>_#fxqprz&Jl&k@od>#-Py@vfm7ll#Kq>8#7OvrT zWLZ;8*^bloi*G))CBYqk@ZM;8PFpupV;8m9GQvVVeFoNAlLV`u=h54jr~!}#+nUS3 zgm-;@qed4!=lj1y!#ZYdx?5&F4ibIpZZx2&5!Gr*Gt8|i&=J3<5(N1Lzh*Ab7<*%n zPo1oBfzhPXz$gFbC=s4tay5ACJ?JFNxaMfnH(AKgM8aj%Q7&g6|G)vWrShMD;SdY0 z!T0(}+%m0Av`eR_iPz%*pl~N5U?*D9h+4{X;WW5$tL38L66K6%&}kap$}So!B|GP2 z=Y8$2PBi^Vn1`}wmkh~lZN#{Pt;^3bmtFcIFHU=c!&nPi^EE=Ja}ydt-zTf9$q}OH zcttINDt!kjPQ;zi%8>V7+02zC%^cmac5Y{h1pZQr1k5Q>YAnx4-BIqI8|tR5deZxJ z@e=juhjngg4={Ig&6IF&@H1@)}q~P&Hi0d=(@r8>%o^8 zZ+jmxSw?B^XOO+ygi%4Rt?;zVKXOn+*}iW&A9+$ud{qAiC*SeF$@%P>BP~>!Ya46e zbG=ETpZ(E6Ib0bMu2lg%8gE4)9cTmJF@f%$LAD4u&ceENq5YG zo}F*@QBM(cu=r@vw!b*_;j73p-m=VGyrnqwvII|5O4K*W?zy zr^2mf&rj+0+e)9-u;Lz^#ZvYryHzzApEtLY(|mtUJx4TqiF)dobNB8~lgZfBLk|93 zxlA{CrObUyfR=&~rhLo>5>zHair66(kvae^t^G6mr_TlQ{xkA=4ZsPO<ZB>vJ{EW#`IuO%oI6yjC(W;PE>m7FI3bel8>(5yjGj+9Jn&>X-{G_k!-J8{&o&4yghg914`oGU($i!Xgw z&hX!FJoWinZ1z#Aduw_tekTJp@L-@snW^>f>G8&1_V7&^EW+F|)uL(s{Xw;Fk<+N> z_~5Cl#gFeqnCZ8K4DzKIo=G=(7=DkO_ z8m5%hWCd(T@KuR2T_cxICTfR9&4z_X&8o>R_ikNzi%&j(qs+E>3$KUiR^YMe&*Riw zq8_pBTnH1fn+QsC4XSPj#oy-k%m9UaFR+Sx6^u7wYdDx~42Cjp7mcj2`Z!K@CZ$u@hR@#5bV^7Am zd@U+)Ybn!haXW{gEXPyD9_>1yRSCO89+QE%zJZmP^9;RNduBF$4b%K(*uJokLGXkU+oc-^M z^7l?kkLLxy58hpp%nr(3rPvG9E%D@*KCQ}pvF29CYgC!C!W;t7O&RNKM^U=;!8Vfz z+&fmKH?BZDdILY_I6HRjUBx#xS%?m3v}tRZ-gKg}a&Wn1E25 zvL$J~*tQ~Uln%E&E6i%%5d}f#8C&~Rpi)4&k-HqqK6xDgy@}3U2=N7EQJ!CMb9Mag z-6br}b}%(EM#>6;7-%U3q0Bj;>5|E&rsCUoa;(=LQ8uRlveTz*Gx$sypGkg2w3ou( 
zjLeEr=9s;3?&oiFe68K)&aO%AlID0bC$06-?O6OZeoKSM8QMP(rLdo>?SF+4wTg=N=Q&-Y%DF?iGw?Wg@w`HSb1A;|8%>--8V3sz{IPS(hmx->&Hp%JmZ(l5ZZNKw_lRHcF$ujwdoLaL(1`ij$MD7 zkwRqYNkmmE$jz+FX59X!K~$Sly6*A880FNvRg<_ zxwxF#=%C)MeV0!|5ufBxL4zyBtx?-@{Y#%4S4TLH*1RTLk**Gj98Hmle!6Sj^9u?~ zTRze2kOFyfHjAsgrLttS_0e016nv;tU9dMotC%KFgPi)PkieMf$;$BWbmg}PXa-3 zdv?T=d9FW*VXne9py{0L=QD9PY(9!tSa+*$+SGK*evb3}o)w|39vj49J5i|AJA&D= zT)@SlB_0jE-J5UhSXPg@GkIhz>Ng^oCgBbAWV_q7%29i$v&h ztiwSC#tW)p`4W;zN!T%rn+r(m$a5}n-a)9o)WQL=4~+_*T?u3?T)ngif7x8S6EuF^ zPH=)Nse2*L&g5zQOyP~1XI;Y_rhHncS>>E0!FQ?&@m*Q{&VjT%AY>dLB#)L(OM)A0_sf zRt^%s8-P0%ATS-i}h_(ZZ;0S~2~+DeP2c zjXjbMSM9tnxDu&_rI5 zkwqUpF%+_EHDk?<$q?o~R1Vvf9A9x84!r37+RkKF9qc{twl$FtM+ANqg)&YzEwEym zj;0OERZF_XWzp6{gl!906eg8CK%>)fZ92Vq6_A?dFQSCjBGC42%*xoxyPVyO;%M0S zpxa4;{B{CZ)@$DV*qZh3+w$<2r_Sy5^S|j6;!e@O1vVZY1Gjw?Hy-bLtcO+)a~1H< zBwO^Gc(lyt7MpL57@jN}X~@Q$_G$^1->t6*-0j2y6kVO|t;WpIrT^X}&eSip3;t9s18gV>Q)$+G?UbZ;sFU ztSJ>K9KdpTWznRlmI!#VrZBf|4kwPyZr@~)vK(-pBI1y~edF$7jKkqeZi{K-Ld~=? z(ra_#le+%Lrb@QByw4%08hhU>DJX2443jD+am~p63CVBU6Nb6?IDds@yE69dB}m>n z*ZyDIaJWQ2K$G~bueFO4g@Vpx;x@HYw?~3Ez_~Qsp6IK@Qkt z(D(>lWir$)ONu*{nZR@8BH(j~9y_p$6O~1y-k21=t|H16;!DL>Z<@#ZufHbBNDkmH z^d7=)qNBd1aGyp6k_tXNt+>H-D2U~l>#YNk8Bv-0M)1Rs7ZkD(5pmJdvkTFaeYinF zS^&XO9By`u%aG?ZDsu%QW>sL%SmD;4v0#=po#v^K0CG+6>43%y=9nD_&X|>>Syj%Z zBSw5`Kao6fLRy0XTU*EhJahEmxCW_W#*}~8lpjD5IC%;Z{kX7h^RL3T8R~<#LL=EK0q{5 zoaqmN1gHM_8;mz6DN_unlqLgYh8p0xfG%V-j2xb%n@0Icg{U_{^nD5<1V0OY-pzb(Xu62ioG)0wnRQ;~ zWtQu+LgeUHFgfdpxN>%~yxhEdoqrUMCdAQcieheOdo3U>oen#E>$d~L9jrb*&y-NE z()e3T{97M*vKdT=I#OIgXbRu&8x9Vuc~pqe)6lEwV!1HfF|+^Inpaf)KkX5t0%FG5+cO``!QNzVG&V`Mi0K`;8;=b-2#&b)Dln z*XIn+>-|?0Dimf9J$VK z=1{O1qm(Q^H3za+*HB9^rXE7-!NaPnhcL_P!TD67W@FM;R4CI)Q>xL61&u}2iqC-;Z-@XnM-&%8dRWTSej`nxOab9nQoiCLuS*FDK>5aDT zKZ1C+BKA{Efu<}EiEQQIf(~k_rT6wrz5Q3mPuU%d@?~hv9PcfbzArXUO0i4dksehw zEnh!stdm<2vJ8a0y4~!`x?HdItZE5U9AaF5=oQ5JK5KR34Thqwxm!DnhR-*MeR=ZZVw~y@PC2m}d;2pxTyT?OA zenAx04F*VO$mMI5t((!9Pb&dkZP`$@Zp!3anWnlZv5?soq0z_b$&htrh3~`2BJno9 
z6>v?lTKL#zeflVlSl6K*vib6pWvcZaF-i{$KeH+kPt9vw6B>Uy+H&7@jIY{ zSjD$4OE03azTBJG8}ZBF@fX6jT zgHqD<>mHRSx9i_FPfM9V;;1n5{KlVJ#}>L$7`z3LiksR<$*Cjt@ahP zLS%8vElkN+sAk@y>x5~y;N=Iay9qVfY(f=@J)RAI-owq$xk`I%N6$CeCV=1V4HhMS zmaeRG7$|u#8uf62djq4h+zRkcEHq3}#$Zk4<6o3Z?%DKE17r?ecEhI6O_R!J#6yU8 zuS#q09kacFdrv^?qqIn--*nIjV-o-95j|{RGA704@ry-V!>A5B`oLo=uekMYP;S4; z~5K~J>NN9WhhWXpQiLb0`czw1+2-iQ?K$N?*}&DXW^8t(-) zNYm>i4U)}=fJ)&7lUbh^2(V1rHO(?FN5jV@7y4{@yO-=NP2>HYgDi!IJV}qQSEq^@ zGM%W)qF(p6@n9Zmabh_>@ngk%;YS1~PQ*KTM0(?Tq%GA8YR|uLg1}CE!uJUYfjDze z`YLv=*@lce8s`tl)vxdyV=bQ**|wZ~zEx6T-3ozjM@5=%#eTW?voC{Vd-!u7%&B!3u_KQ> za~-<>PPhaYG5dLdk*c~Y^3+}jOThZP(U*Hnt~!@=&pEYy!dRxiSV?AvJA_#GnAa62 z2m5Vdd_MANYooWGAO!H7&g<5_XOLTsTiRhV&UJlV??LN546*yRmh{(_cT9!LK*`LC zc$W$PjaZ4y;$_#_Zm5ng{xs67N!is$Q@E*lXX&%0+nRW0vq;Kk3ZKHf5we7eIR9wq zK#%@*5Zi?9_o~M`HsPABveJ!OBGeqaD*&jw{!1qtGW$H%tIbGADW8(dt2F;|a756a z``v_Xa&i5<$Y5IdI*>oin_9ZO!buRzBm#h^`qJCC?265rK6^gJfXgTBFexMAd*X;+ zFO7T)cp%xz++Q7s$voT*8|Sq6Nnh4yYcGDj!3N&_Z;$O=|?4ygEq@Yj+>>?HTGW~ zJv?zeu2r2Q)n?-D`gjZ}B-?X_q?KgTqugDy6|na*fiM$5oFQh~;D#0Uds6M9?^A+{ z{YIA_k@##Y5p5w3WUKUpyCI+?v2EY|eA%SARKoIdz?(4xI6rnSHK`KwGoEG8WA>V~ zDsM)MsIGX^Tz}|X>d#Tkh?dah1;WnT!pRoz>e}*|y8MJT3#VT?K10su+&DthI|_xwQjOPDF;t3_8Cp zs3abKXq&`5F$^*0f^>D%bpVvSb#q^P6#KCa32cIYYU;WQHxca{=0+|i z)5w)=TxGY%R%qt~B2#6At9vajOLXxLK+v>t3&k4Q%O%*DXYI%;yAZGB-f@R(r0`XG zkoz04d_vbFJ_k3#68_AGT%b|^r|GmFXT+-T>H-iex?PFjT8$=pS4mJ7dJdOOt_BNS|90YGBXUsBp`w8ik zCKt4SBqd(CpW%fQiY`EWoATg*yl-C(04%mr8nA;~iTG2CN?>TkH)Fg>|!_8NduR3R51a3B5 zuHAEHVG61szkyL>@C(iwu)RXn%Km&7E|T$&XYp)1brqzn$T9N> z{9;CnF4*6+BwS3Qa`hx8;XPiBj?~63=xQIHK-$o8*h`tSyJlDF!R+&62G%nYfH9i+%YaZX`#*e9X~$ z5s_<0pi}4Pb95aSV7)UmpD9iHm*J>89sMbiVlkl=gbwjkDqO{P`V52~%YX&!epI)v zJ@qNJ_BB`e6iW&G8AzhfYL8HDz0?6<#*Px62=P12HhiK49${;e zatWu9^v)ak*3UH(Vz{PnEvizMqeYLW%x&oOKm$4Z23kVVIFWLX8anU8#FJnZA|$=p0XZ4KLVcXl=j7O7vFV9ypwlm$Y%tsgYV(jwZE?ERxl2QE2szq$S!xMD+0 z|LEzI z@k`i*+SzsL^UqB(4Rl>(Oiu4;-~Y<-B5Ye4&M< zy9a7;l8ETQCtI)YJ3jruHK%0uZm~k`?%@lBu!G_y=Uc_gh2+{criwO#Hy6b2!-$x@ 
zf2BX@$}0tzylr{ye4u=VX=9$+Cwj`37yQm@iuoarQ9u9cHhYPVsJI!8HViy>%}ii$ zvZPGP;WP)YRM5{hD-6U=_0V-7?-KVKZW2~FB1^w#BrOW6Mok;g(_w8T%}^62_4)n& zdMtO+jgaPWT8NQx`GTmAqLplqNr8b$qx_^!vr?n&yXgXLV)A2G8YER>x;L5R*PDBj zf)V1H&6t)2ktO17H=$_bCf9flB_rqU%VIeE?UC5ieM;o8m~<|J@{pFilPOH>S5%TY zzax%}cUL{0p)G$M9$TCcJk9~-z62x3W^b4E&g!k7sw76HhhQDM)XYTD17#m&UhSXj za4GFYxspnzl0OZC?72qyNV-ncAeAitL3C3?@K$UbG*~Sta@O z!~7uLjfKPIp4K=ky4RmjbA!MPLNPbZNZnlbdI89JQr{%sHHAf}_ibC!Twm*VY~K8A(S+U;WtKqcLlEr#p@BjAG_%lQyd5H^|Bl)_ zhi+kPTke68#5y;Wd>G5f$-B81iL4~8# z1`)LQd<4^ur`>}Qniy8GU|D!eVt?F~FyMOYGWV@GD-hC`gttZjOFhnXEY~5jAUWzp zJzh=?A*-TQ@#C3xyv z%GJFZyTfj*ycKtjmfj@5n~gGrU*uI0UOQtK(Qtma0@IkR%Z8sao}uWu+RWVpX< znbj>)zQLQlp(jQ-A9<)4A}Zdf+gwqz*lK^^YdI)cUiu~9tx~8`t7z=~d~YqDt8JrxsXSZh@$GJ$YID6TX->3P%H5+Bhjg~dh(17M$g7W~ zsAjb-6Dk*Beg*_>kP)xKTHh@|f2?F}#)SX+KGrmA6Tgt@PP5HS3zWYVnt4f8X#Em6 ze26*r@{{@QlZWp2;XRWZI#3a1Uj8S?IJSFO19PGGC;}>2NQE$+c94$u33B%10mk(e zTrBnez7!kcoc+MeWpJAJtm96}iWNTAr5QTJr(}0S z|1H}_g;&0DT;EbGZgmVG?|ob_uLpyge290##M2S*lT{Pd%Q8-r&PGPYc9F%EmY9Yw zwANRn8=A@(fzCSzdXF(ZG=E4q`FF&Y%rmTgI|}|b@2l^=6POg@(QmF zUMbVe^cw`0l}jAE_0@0AVrD-yk^1Nd{9WWHB9725`tpPNW#zaQa$tUWc7KYjjKOwr z6$LZ$#8L%wOn_}}F{T>X5xMZ;Oc$33&3Jree<>o>G70v+XJM%q|x_mf`;XAxrGN zb$g7#4c{z;_^wsoo4T#B(O8UiqgcVtEu?O#jwp;x*B$EtuXrr zVQ+zTv+H;!nF{*Obl{tpJ-Lkv~(DYKB2nIv0|=mqz?*pFR4Y+MSzl3#0C|y zp9xoTJ`PiJ@5cYI70+tMp>eV+9jAfTD~jm@TpMX1k7il%?&#@@kH<`JH!AXK>pX2- z@7QDCYr-fL#+wMG`j`nQ84bM05-V%gdnxN2zF{RBA2e7S z_MHbXCT~6aN;q=+x2{q=Ig(0~){ur`8>qfMErdE9IaA~121@tRpGnYF98-SZ7!6fj zd-KY`I=3Od+T73%X|fWdl&^tx(J+U5c{IDOL~3NdtxCk52Z!7^BU_;-t=qFsmv1|w zfi4+4+?G{2aUu(6b$CeG{-9fsZ`H>!ph3?8g1IE~xHJ`(M80uUS#Kz;r4sf&zt8aQ z@>XUO!CO(tx!A8Tm33=IW#*EBpG*SpQ_XCAicg1SxRbJZ#iZ;|tFjB~ZgBcCK3O8u z-RKyc$&qdt!EKY}+{IF7sSIr};hh!2V%}l2>~Fo7(Z{hm6sHS1Gj8t$@Fy!xkV9Bk zHPDiS3Ry;~w3MusfIQ#~Sxp5wo5YVt1t`~V^ZnpmG3!>3k_NY%&DukY6Nc_5++)=5 zTwiY04Q}D{^y~v!N6HtFr8BL=Z^-;+8+Wh0WyHsUlG0dd{XDfqivrv7hIWWua|6D) znA*jCECaxd-QS#SxxHLO#5W{X6DrBL@y;_cqIJ#k<_+sXl-R=K1@GdrUiXrk*ai-j 
z&bpT>+aE1^Zv(oPe|!_}kClnaFo~~MWPLO3hs+(k`^h^)Cm@6Nkn-jTeMMm_$ags2 zR?>6Kj9lAm11K%=*4WlPZA~tBDz^I+Z%K9m9uBZV2ZPHXE?r8s)56UB<4?pWCKJ1_ z6BCv4v?q4M<+;BwhNSVGy^OAStI6sjq~0v`L98I%mIsh?D2)u}zeBRIT@qT|Nh(M$ z+qS6w)B=JO?N=BlFITvnE2z&*zw$L=)nf9jh9TIXv&>7**^E=p`Pp(@2SD_hP*O_C zLT9FBzIqiqwREm(mT^^w1_%d_d%ff;Gl5IJ(#DRWy?N3lw|VSU$!>n@`+`|GRQ+Te z43sdyj?`YC$+zUDX7tFOKU1wVCfYmXnKD@29OC9w(UMx-k?4?e(?q1pV&;kVO8+q^ zxIbqLOAK`Phs_T@u z2Ca%}E3g#d5iq{dUPf9vL3rd<-gNtRb}-jy>P^cdB1pH7S;;5J^(|G2F8mnJTL2j> zD=nqkTYAv@Vc!o8UMRMVfqEk!YIRSe4V^#Hc{b0+W)Z@%k|@F7T*ZZ# z)esZ3lnEW5K0B{w&3<=`ChZ7~F|ieGxq9KEN5X7n_rrv=m6HxWTInrhZ6F5C_cTJl zyTO<>aLSJYrBxXl50u{H!^4`CmW`A4bKd}!2HFqg;Z8*XeLMoNJu>DATm%17c3;n) zVU*wXXg}6|dJIzwBHM;FqaZ501fibm>2P%Pe!*eGzh{82}L#Gikn}MW>V2 z8+IFn6}1sWbHwpaf89|`ryEHPJUjWvw&Uu?Bn}16Shb2SwsK zY@KEv&p;)7L6S;FL?YS1x7z-o$tq&n?y{V#(YQB|zT1Ljkbl(=TIpOQSz4oO|8O+q zL;M+#+HZKZ)%f-!o}yCj+uRq@UMMS0Y%#{f8PL|lmRtyrflaA(MdtrH&cc64{Wp%Xg zvSu&{W%BOa0Td(ywUd%U&eh_tiq_*RfyBT&x7LE-&pYje^h^D=`B-E@g`%YmN&B{e zsKTef(tgH^x@w>&8Qz=X)7#p~bdKj$a*!PSl=ah28(e$q)Q34v?^6e3o!?L2tQ?F02@<{p?_^G#MIdjd}8qAOm zWyo22b+N4gvzW^g*KL03%z}cAo4Q0jDt9TcE>YiE`oq(?fZ8_F%=N+>uD`r9ix$3~ zRPix_;3d=RzD;<#s-zGmD#&Nt>by-2F@t*us)75adCFmx$S=g zQCWqveeTRju_1ESgAYuaCA>lgezaHyA+$ z<>)hm_+{U#4T0ukuZzjM!^%4(`SGL&3LZZ4qAB4>HH1~*uVMR+(x8B&GF-#fF8gsj zS!Ms3MNsZRREtt56Hw}*)6=prQPu`5L5*9=cl|xt=`rDnJx}1;Tl=ZyTuoM=h_i&= zyS_Q1zg&(NIyW~te6eOiYW zQJwV*7$xw*Te_hqmISjkus4|`Ug19Al_(4RoKo1&z}`_SzY2L|uQi$!hW(7DygEAT zNu1p3g}6o@xIdV!8H}_U3yIzmn=kb~=a{DOq z*iMjc_u-M9Y1%7Clh5jq@x{Y=!Va7-DMXo40|1BmFuAA8&i2bV&t`8+PfVdr8|uM{ z5VQi(==99M?#}=~1vXMf`;{$}IK6r>FePVhvc9Y*wu3TE` zj@#jYk3#J|-1u|(#vM9XK3joVoMAq?YJG+mTw&xh@uCZ!rsNY=ow1OG`97~c)t#uM zUx){C&L|N4zq+F~Qrc3bL1(z+%)IRS4SuG`vk>li>WzE#BhSFtGMA#nkCCi4r)4Xj zIzvmfKrapsirjVMo*^l?5X8#q-gT$b8!D`?S2#ybcJnTI6=LhawOj!ABn80_giSnq ztfw8R9!_yTYA9P3S(sN^Tm$HO5YDm2fms$Ok9Xq^*1lJxD$IH8cg89QqLDH;%fr4$ z=tK#mOQm=ZyXq!~9FQ27`OwwbThdsVo?fw~$|UZIQc#zUPqf~&3bw~}>yq?_|A;`h 
zvybxi?#oKL*ffME_K^XRll2VNYYZne5jj{x6&SXHh0X~~8B+)`V5QPy=vRlGxhDJd z85`49v1rHs%GH)-%*&>7%Q|gde18F1Im4+dVEXnxOcQTu?_23MgWMzjqP)~8Y1yEtGLfXq8 zsq;>QRN%Zw+_h@cpCTxX2P=iy#4XphaNHy!aGvcyUUTFoZdGO3ZBQj)CC zJ_ay7H*+5CPiTX?oVnCFwtv6&zIQEtY-gk9)yh(%CU#&-J! z`^&YX5Kr4L>)E|m)Ul7cIks)9{f?%?+xacTjx@x3bi^s^t&lz@UDb!6r(k(}o4D=Q z&@mLklYOG9RkZ*)>qO|Iv*$Tx?_S3Vvn8S5CuBaGcOo^~_U)s(QwC%ra3Jk%#~BgO z(evhShBpkz*e(wp(qqj>JN1vEZB^2Z$fJ@E{}IkxmXhwA&L=0N?AKQO*V9%bOgK2ZOhKW(&>`+pRaGuW^qP!Ybg>AE~{^ZI)G$Y9{u)!b( ziNEu+>VsWbnQKPaEcBPp;Syya2U*8Wf7H#hjeI#d_ydL zsSP*M+%`kFnFVZXcn~8(pqt43;H(wIT%P%v5c^HP?xK@}PbZN`1$dxaD-&DKIaq;?Up-2;^r(<>Iy;#n8{Edn zCZTd2=M;&R2R!55&e-;U$xW%ENE>cgvBYsKPP)y zXMbR7;lB-(Jz%9Q;Icot#N4yTmW<$+ zqi<$jydB`tzoNUDUcD3sG7p%p-0vj<`=qJ+Ty#{cMenU7zx18V(icJW21$wqG=Pme z0!Y;SK(2Q%P2`}(K2lioQEA4z?0I_02fk0JSZbeELV^D8lPhO$DX?hgJvVf+6ejg6 z_hE?fuUP@~y^ANQj@Vfwovq5XZyQ|x>6;Lv&N*?xd0lgO(zv1h{&OAB(S54XSke|( z){K!qxMM$6Fc(w@4Lu_kOc73D)ZsCA1MNK;f&0C~gP}lQQ^)tD?SysZ|_6 zl%r&?Vc*si?}4)-RetOD`g$YW^Q)h5%d0sE>dJjEW!N6&FX!V$aKOW z#?1ZsID&q;}>C&G^{?^8&bh9iv3bq^YH_RuTnV4Xc^0te37vC zi5}EJ;esidG$JUp&;|SOq4No*TUC4){C|1OT2H=IydE+zXt#S1FfhpUbo(@Yv!g^ z6lPU6Oj$Hh?|SDA7N?Nhi&0Ca4qU-4%iWF@-P-^#i)Q9U6P01;9rmlQPD$?hwK(+l zN-u;u;xSWGipyAsUn0-kL>fg7;l&gj;4^DlIER#Q`{ZFHhDJ)fQDW6Ao1ucHGLds(AV$wXE{xg*R

    0cE`xAu2^NLZQDw#ju`Ipw=)he2V6C?9c-Nk?FHoh;UpQ**L@`Wo?qUk@V|HrTK z&!_~&Tlkjz=S1V*RGrPFp{-lV`2OmjB>qcRa$FuA!%+Ozm#5DELd!o_2wPz|k)yGE zU+izsu3r=gP0HiUa{N2Jsx+eVLLPtHz3atPa;~@jdiHOCwEw%NfIKao!?4_kfA8D= zpYsyXVmy&^1NW*BOTD!H@h-zB&#FcZzBT*XGp*ZHaWwaS@4R#u|27!l z;S(x3TK@CJGXL8h{I3hEdQtc8v*%ULzdegTbyigy#iCdLFZiMVm+d~Y3xslHvj6ue z|9h1GmCFC$OC@;2g1D!R`9`_r5&SppbdF(v$AR-X!-~NyCi6OcH=KJsdubTO%9{+70&8Uj(ZGdy}zX6E;uQm&1qbjmM z)w~mb)AL_{>-k1ZQrh2i`L2K}&3~8t@5cU@ef}#X|5XM5U+LJmpXyu}!c^~_aGEd> zd78W)Hv#c}gqvwawHgGa-YLqL>c2jiah5aX(ZAkKe_xRGHNxc2sZty|WcKjgkc`^} z1tST^7BiOppAAunHmtxg#V)~fjo@6f)bZ4REt%A~`HG6d==sUBY$3A#bl~ zoU8kW_i0*=w>K48uaxr9s90P|;&lZ_GD-UzvkC^)S(SQ5Q!7)Oc8YYR{adhNfv}Tl z=KHX{i@Mj~!6jaf4}O)=ofiHVbM_At_fYcp+iBN8p5R9h?QAO!h@|a7zr2*0sJDDn zoVHK0+xQ9N7_N~ob&A^C6+izR@A`i|5xAMv&ysc;?Cc;;bfRF1_vF92hySyzewaM3 zDqx|due4T1YopD*G=_KR9=CmAFAB&g={GDhk!2|ogq4Zwq(~(&am~|TH67&ChV>ml z6>L2%s<-Y$bhE9;TPICCj;(JHMckjshJR}<)#V#|gbHs?ZBwEM^$1TbEOHHtYBt0A zy-AbI@OO`v&>Enn$wWU?E;NX1vh;18>v?n8FM+khND}P(<^bc98vDxAve3IlpoJpl z4ogJt_saq4{rgvtSur1z-nIPNez3eZH~2nJFMa<lATtn5LR)T6A)Z6+^4eVxm z8lwp_#$NX#v{+z0r#wZwx11amdEUkx7xr8p)A5QE$`+jvYkq)q*Fg#nHs~LtApp!c z(h}C!BKRmTLJp($bG!q}QZJ7GPqtCbiD4d+8uyy*>zl@p_c8bcANNX5w76CU!RM9P zrTi9KW-f-yH=9Rraw<#co!ac919|hC+oc_5HP>acOKn!LZTn(3c{hJSgr|Vrsa6&- zC#QVTyvHW1%B^1al26grmGt&(zogt(shKJ{eh`2#Ud9?SMK@-c=V=t{<{X_8yQl!@KGT z{Ayn2=+k9SIju|16G^CzS-oi#O`LNnAxLN1hu=?DX}#0GaX*+u()H*`*%{nW2DO+U z9ql|KkMFC}ziwz_W^)pZ%^dEV4kqD+7dvsb9ru0m{eIc+sl8608(*;Bk?dLmh2v3Q z8O{)W-HR0-cX)Ub!8Gf$PcePnmdQY!;lh9z_ALrNU3fa&;?OHIFP-L>g6x?OUw%B1 z!*vkz$EPSVO0IlfGPG1AVgk?ExDlG!X<~W2EaE0U211&&Ds4eIUOY-Ayp5Kz;qAipNL?dfo3u(D;aOP16HR;U80)Zx=+_1u@TEob1ByP zAyQ!m75-Q1A_f*%vnHS&eTjlJE7VWx+;`wZQ=Rradal3vT`cSE3a6|IS6rJ9_HI0n zuupT1I60TOvn=;H;bz<)tSHc;dXI6a!BzPDdbm#t?%v%pOLV9yWN3a5$1?@x)=^lf zZldjYldIW~!~=v>exOfl+LQWDnuC{4ElY3AT|?g(I)klTg7E6fwY2Ie7N?(HGar-I zLtcA^;xM8@4u78>$pgQh3bte0!@E*?#k=dE7;#B+rjX;VQ)Is)=G*`&Y77{61N&k? 
zP>IvL@K{%&sNS5ns*aY|{@3AS=R6~PjmbTt??Pl~plKn(3}qxnZCoGRmu7YgopzZYfjG==2HcP?&jQVA zclmg2Dp}J7y84;A7jI~ELz9H{R&KBB$m*pRASZ5<+xU+xCThj?^C<^EhGZmvwr?(A zC7rat)BbFZYcncZgE{+p~zw~tAM9UHs}CoA}nKW4l7aaBWxE6-;mWCghc{l~X;jb|kMmH5`=QQv=-lTt4Q6f_``sxx>0>$;o#Ut1+A&>_Ydf zPp8Tu5zOTsf3gkiqXA@2ZP#;o{#KDZZ@S#X<2x#}RcY<^suMUZzxiobu3&())iop!TfE*_;TC}mPGSGm}ceH&LZ^Y(-G^j*A zCij1AnX2Ikch#G`DSOaT`_%B)vDBrVFVgf&p=YgU{YXayqv^%1ppITWpwS%u$Ye&z zF!p%Q`Px+V-k^*q3#W)E&7LSkvx?RTCH#2I}Op`{kQ1h2&yR zWJIntN1YMn5z=N3$K=s(VV;>fm~u=!L#U2I zCbH&$Rc$w3n}4Rgpn@J?Z#F!U1>WQ2AFKHCyl!ds6njp(q7S59hBre+2>wDqC<%h? zjb6&{&;?>W*54hABmvl5gw^{OnOlJ@WrZPVp}CY{#m9QCu(_yM+!_@mEIQsB*bhSN#O){OEf!rt`f%MMZKe4d2e`ID?I6JwA_R>yfJq*UkIPEa$gR8U)M z4z=_|(7~)0KCl~z*J)o%IU2|Z>b{-|bmAP){Ucf#c9Y@!?NFHty>DXf(*tv>o}5I; zr(85AL3=h$?7a*)D~SCv)*$|QI7f1Du2($s^23B|ZaNyZ1pIU!aZ!Co)ROajjv5;T; z#uYfZ+{s=MH1AjexEA*A2t86`n-nce+XVm$cN#MdWILM{`k~v;*iJuf#WSrzL>(|e za7m-!vC5{PRWZ1xTTLprY|n|56RO0%)9BT*oIH`6*kZpqr|fbamQH;wdpT+il1xqt zqVy{l2-t0eHgq~1(8)$z#-dIZy~1Wg|A>d^i|H$$HP~yGr`&vo`Rc#j+a0%QxK`-A zR6guc0}+ogwz4zW@~+2oU;Q4RnRWio>*Vt8B=c{y6zukgBrFs~{CK`=R}S!VYQqQl z5u*YU7d9eB%lQ^8bM;9pePwv%)%Aqf_`IP4`)+MXOQn|qJ-tb~u{8a3eaEeD6vRR= zQxCp;sU7uRG@^N@V(NTjtf;`{hHG== zs?x(hkM$Q_J*V8M5M7F`HpY8aXO2X>+CI{e_)T~*nk0!pTC zRl;zV|4LhjD3%F2rJ~mRaM>BQlqxY%cX3ujo^QHM0#}eq=rdB1n=MB#+tD%7IT%L& zFXtgEI{_l8(h#V5O%e!c@~|jyxhn z?sRV!r~pl`sT_C0BoB@Ydal3*xAGe2F~x7+TYe0hPs$A&R@YG-Nawa;U{Y+IY>Aa#8Sepf zO&lv*iu)@$qg3%KGbZ5dcQMcH?8Bf&V_zcRv!Gj@71@Fd{n9Fgiqy}`VDy`c_tycX zEk`B=jMx8kz`n3tcV&@cSH;hkR!^P+cE9BIS%#rH>sCyyOamvx5+BJH`Jf-cDfO=L zIEEWu91nmBq<2dw!~mN^>!Q<%4Fr1NbdGCK(~R@Et+hzVQl_h{w(!%RCW$ z{L4G;cDL&k4bFT`h_e(AB!%agKTS(tg zrMWu)E|8yid!pGECL(FR?lty^_geX5UH^7(N*>h^Df)b)5sEb@ueUgVE+1~`0PqU0 ze*gqbh@4Blrc>Im&)REUY8v^Thvl}HLG-ils_6HR?ZfJ{@VODYc#$~yoRH23pYYZs zBU?Z6Ev1am{Ar&sE{zcC8j7}Ho|f52u4Zg{;5G0e0Eu^wf+jigTvmkdV3@xT 
z+eO2CA7agS;?$M^VUF!<5n!Gr1N5d|Wj=5nm_vHm!7xPn&AaaE~}^uj%x$$aYLfG%ijk#vt+^tJP+b}LnnD4M6)_aK00%OkxYT-FB_#4rZR2{u;1MhzLhE$_pZt5Y4>h6cjj;@k42b6rO!P1v*yOolciZ$z!Wh=OEzE_-T5&vGs@WFh7%Zb$@BQc;>UeNA~p>y!*(T=ZMO z&LfcZ=W?0_r9+#+Jxz3e{YcAHKt1>@=R(~#IM;IIzh{KwjOmEMcjO9A@lt9xT!QHw z#2DHBaKETV=)6IWJZ;^0n(U1df$cx6@9#K}Kr}U9edkWu-*U&Qs??OUJO6*i-TslC zKC4E}D>Gb{`CHzE8Y}&O88{zyf|`rDI#>0l2K-Guox>FUA6K2a-;zSA0^!tGJ8{?PukRg7!%hD8J^g3W{oL=efcEFV+kXD})&KK}VW^S$ZySB~ z4fUl(>CUA8_2I+PK864?Au;^R`CB3)NuT4Do|xXQF*?@#8)-!nVg9#n#iyC-QTgYF zFaTIni^{X0&d>3t#ts{GnT8%|lON3tPRGL_u_0FcWXRJuAK*}-Y)~#D_Im?SZY3sgB2iV|u0t@p~En=dhpY=S{OW4|arlqZ| z^*!pszM-w?pIe}>K!0cRSy8(L-P(wQ4EnjP<&QrI~=z)ZDRR z8dH}nFtnj|=)*%T;0HlD6WzHoJBOPW|BOAV3VaN0ko4r1PJInzz23=ydSn~m684dB zE3tgiTT3@rq)^(lCi1NtW56LW#~$Mh)wKTcV7^Tf(^I_}zP zH7VZkQa2y1do;g0FvQT2hW*j$Sm&qW41eQa0cA#R>3+8tcjdi%qn&y)gjO!~b) z89nUqs+F~zOhc{Z8B=xUx4(;y$8&Do_@;b2VDR`)f@m3j7`1nKoWJs!olIM|+sb+u z@W)9~$+n@&K6N-ws{G%|k)Hi7RZ7H-pZgnIaWVA%WCy-ZOOyY(F7k28obidiu}Hwq z#{oj>b%WTFwBq7h@tKKYa;!D^k2}kXcCP$Ixth`Ej8d4A_Trxc|Zo5ADM8PUK9?ndCC1qH0X8_4GfqH$cwRZwu-T__Av-Ppm%;Y7vbOZJoOI$H zP;q6&h1Y*@EmSYG=*cwP4oZmDG&>sA|;-#W~x%{~q)LD}J|@X~wSK^d^t2bD}yM-*sGp zE%&Wz#uEvK3;Px!hua5%HyuPr?5X4^<$`+V|;Ot|52xu^j-SUxg|B@ywoE z$=V(3!Vl*`RKbbHz>12#Ph0=&Pu<*^bnW{&ke&$**jbY}EYO}>SNy}MhGD6pKS8FH ztvy)HHJ@anQ0+9s&pHFp^XGmo?c@l3t8x}DyY9j$q-h1sPdDF=d4|`wzRpr*TEr=i@f`o*0N=tXBba#i+jdZsNNOy+yloKL^~qJR-sHs3>@O zDu|)!)%HYj7zw4?Kle(M9@zKBqd<7@(G5zfni!qyNZi5~f}rSl08HrC6v^=4R+vv$ zh*{Pw!Q-%*$M>gjPfT4G)$c`m++OvZY(k*p8dfnkH#gYgbYL!%5i-u*3dkxb$&y+} zt%r2L^=dP_Z?z|irl-g7pJE;<^M1Y!TDF;O!K!^Il~kR82p>CfVa*}1`xtoB)?1*J zAE+I4R%i11l0y>x`9mjD)4XJt ztz4j0hZ~34P_iX;`pM3cKVfWgaaFf%i=cj&8c zW4^Oy-~+lLYM3L-ab=?}JX=XW;M_6iB|ogpcmB_S>*P_mRx+H=gHB7~J6K+WV9I3a zh99SPQj2^1zOvr>=tJ+vM=;H33LTpHw4fE}Vd1T}uAc_-XIQ}Ayxjf+dhE1XhZ%uL zidH_YVf?AI%}GufnjW2k5Y=_Ysru?Gy?hg#^SSd9P5aaP9!wQ&8ij1>^6Y-5Cjg3+ zBbuT`dO;h0zTmmwoYz{2h)%%~mgc(0iNk4=xeA5$M9J`(l&hHG&<6mySM`FO;NOi^ zI2r6V`yIwu`dvH1dBkVrGK0zVd8VuMFLYgYDV7i(($9FRx!OQu#ztv=)Ci=H`)SkF 
z^!@7D%s!JnRiRBpU2*K;n#O*efiVNO{0S^nh3v{bl5L$TJDCKyL>di$yFjH_+h*3f zi}S@?y<;4e0;n4}rij@6>LlXi#7-yJd_od=+`!0)o8d*B#noF0;(zQMB7KR&94N<> z!>aWOPDSyl8X=^OmX1z|FX<|!e1GyCyaaG!_pHsDFsGOoLA`=M+nz@$A2#>dYfk8@ zPETKP!`jZ(A!S-3A`;R7$3}J1aS&a!g1o%0OZ6PRLNm3MwZmSmQQ~I-QeED{%t68= z7kP!i(R>w|z2mU4uBSHOu2|Z}?${{dv0TIfo<#GQHGGatY3V z%s7y?#NJg~aqN2SS_m{O9(%GiKipFcQ*MWzDK3_4Ob{FXPzwQx^rF`N?%IPa*#UFL-}KfAwk- z-#(=&{;ZK!yJ~u1Axk02YjNsZR=&a3gzemJ3~vB*GNPC3e3p52I#8unjRnuPY|wm>0m8KjDCzUS5O;0_!{TTFIs+X2?2jf}9wrrv69 zU-)_JE_`XRDpw=SAOrT_bpwdn)QR{%KS+G*#%NJxVjqh(WS}s?pj{>0l+33=D4e3p z>yYUq!*l3l#5P#J@7&T;^}dy)_9Bu7de}w4MmK8ncCXHUn+@H#URb$MeZ{hQ^ZRzc ztRlZ{+A$FupdO{pyA!i)P7dwJ>@`{(mE75(zMw#&X^%!cyr;jf3sLJ2CrL9p6uhRm zE%s1{YyJw~vu1`f4%+7#j>J$WNpOAqZlumW-|Kj$#yU#VWs0@T;}+hi@tKJII-v){ zN>`YD>Tz=(;*ejtsU>Su9J`YPnD@M<3(|!v3sE*n^#3zaiuq{ENQ7NFe(t!$7eRbs zUTasuaq8GB)MlgGd+*qopu=UyxR>W@qa7ONIIsKtJp&T7K&xJwzUF{Qt$a=UG9+F6;5uB304nvsMM@{db8h0{&kFVF!OffG!X8RTo)4MUG6 z^-ws$#Z6Z(Q>9jI?^1RS?ymQueNp&wD5S|Zd&#_B7{vjnk|pqJ`4}tv{0l9b$ZA3c z99E@m{cb4n>qFN^aNyV?Rhl%Z+rb{zv+&lvcw>aK9TF~1z+T6IJ94{Zdtp2UF^kfo`BWo{N?_CiBVgM6QrbnmGDoYrCEw_AlFPZQt|W zwG^gIG&|ebvf)tJDDD1coj;<^8|MB5mm|7cz!8#8g(dUd|JW~pAHpY@Ul5`!xW z!QrsL{kRBo_MyXIJ=B8KvUu0|#5Ivo( zcP#GLs+sL!vRe8QuEk;xKlms}h0ECxtn(t5!$?D1oY!Qg&N=HcFX_WjI?T~IB|#Ly zT%WHC0^ZPA!FBn`w&Q#IAftOCZMX7f+9{#p`g&InQmn+Md2IbNxKP~~? 
zQo@opyc@9}G&$QE2dQUsZ-uv~+%_k2<*GwVnEyG$(#LMBdZB>HporDVOQ}{l4`nj# z@G^}t^m7l+Sl$5m{%5>)(eAxvpMlN^wRa^L zLBFK=uWyw!>E$2Jn3#?CitXO-(?`ZAvC&W6aZz+l@dL=|ER0#?ANMB$;g&ioHdA`s z=xRZHl{lRSO9U{1>1&9{=o>c{C;J667nN@u_~#fZH*q+0acVCCEHQTv0I5{N`HrYY46 z#U__fDTq}o$C7ySK`utn?%M-=jly-9lnzNBF>zS$PbleGSs>A#RC#8DU2QR`rJ~TY zRAmOKtv{$5A~`sg#VqgQqLk^Lol6OWrAQ@t`7+A9ErWRabgumMmL^`)Om1UE$~PeO z=b{%6)D49bqcJeV10x!dl`_S4Lvst}7}uS?;AC3|{y142e$7)U`=#U|A8zq`} zFw++@Us%j9TXfflVy0q=?m3`jM%mJP1HM8;<| z@0ZSh3eID)+#k=_sT@m3^(M}5{c%9*?}GqTD6V(3F;fJ>zx)Ky5I+jV3sh2F{)K9L zEJzN>I5h>+A5sU1JXt=#Lh3gjjs9yx5cLnZ${6u46oLSe+K+H^J0^SnA7JV~|CRgE z5cDNb^51^zp+*H5mB#BLNt0an=z_Q}|2AoRk(LlBfa)Bjx%v~nyFZ{~t=z$o5I_5r z3!DGv)&XRV6hPLX=Y|&Ae<7CP*qhJN|O6g*aZ13aFIxlfl{=@p5k1Hc}skMpgz&5@TGp{%5L4y3dP z{tFZK0gmN!Mk$^YTNOHmS&Aw?@Y;Yd42x!+#m-fcHcO}5ME*HFfF3b^Dx$|ZrM9Wt zRbAXiU`;5XFiXKX_00l&79IVATeY7`vi;vn;{Or}0GL@BMWDA2t4c?GpF)(I04g*g z%EkkUqmC`d$3bN5R@FOm5{`qPY98jo{jOj?B;k|ScT(9F!yTO5wl z<Omw6fd*Ib@)M7O1KW(QNjjU!v*V9HNB3Fq% zTplP(FIotcOgxVnhUF31B${e-GxwTaG}nA4m4Pbvl#H2Fi>coQ8AL9w&n##8dh5>z z22z$7$>d^S#fe8CChyWy$Bww8^N&kVZ41T6B&Z^%ZYe&H0$#6x1)bK6XhtfkrLC1Vk|bBZtI!VxHsQ)iHz>g=h= z|4;MeA2V3Y7f6ZsgG}HmVE?ffKV*sVM+^@WRPe_i0>AZ80vdf;#L4;xNdt&Nz#G6p zJyq5J(R%q$DK%*s^%%ns9P5X3zWI!Ii zQ2V#vB4vQ-9BJ8_{v(Y256y?_3z#45lJ0*|IihekU^-uz@^CrFnCkqUvQE&GF!`k@19z;ssY*G94bTsi?Fcwp@q-$(pwlHd}U z4*3FW_V<6=uK(N0|J%y{zqC?7{i$m zuQ^YKhQ-L9Ybfb>K$V=&rq&H+sw`}#O|rY4*MW@mOSa%bRGU8(P(Ty0!~KE9`5sB_ z<$3khXV9CFukYZmm9@WNR+vrD*Y2KiLp$TN5!EV7^`4P?y^t5y-%^3BI-(alR?oXG z_(sOY(lE9Sud<`czyh8+h_XPCKW@J{4WH2obV|r( zF8YmGvDPC)BrH5D*UGQdKU} zB)^KsP+zRk3yY~<@Q~rV(~pe$>e<|6GZqu)H`qV@1h&@M8FRd|{YzWhTU*>i908qODfKYKeS?0c%7iq9%ib={mC$ZysP#I! 
zi@>`xD9$N0h8K^?DZJrUxw9w`ld0@26EpLw4w#2(d%Q3Tpu;#9y)1M7fJp+dj{%9G zqoGmI_S}V*jPkmkW8q)SClS=#6X39020JSChaF3rVW;piCR!&vL2?XA%}x`elnJMfQ-q5~MMq=Q=c)JQ z&EJ>eRn*i>IyVm=7fB!0?~QR{o&D}!Z_sk)*~yy+b@}6LSEQ=D9T(gQKaO7S*K172 z$C{nY!|1r0wgoJ2a%;X)X%YT_sgCrqplk0U)T7@u31 zzG0D_B0ctbN!ocJJQCma4wCa_%zbZ}0ge7?BC937%h?9~4@MUv&8_x5p{(IeE1;#l zwd-1o7+Lm+(hLbxz{zMgNRFon`^~$2RVYuMJXxJP=L?_Ec6}F{Xk9pNHMwYRI^0kT zd9sbw%%LoO0sU06B08W`B1_?7SUj47@N|R0MQYypoY#5dtH|78nTP!{;?X)kFWJ{K z&aE8MRmTZUa-}3!p^SBJS+VKlq zo$<;J@}8S>mdNZsxmKL=??P(?9U~CG2=_z~ql8bDS z58__wx1yvXWA_D-;9gQiy5a18mBI5esrYnBB~IE>n%pTmlsBpI=vU_!I#fT zh5Kw+kzJ}?LJw}*7F*Tc&b8Z^ZrRrR6QEMcO_|VkvEBI&uLlG?zAZ|6{UNf69Rzl= zq{;E9HnDAHkpo9Fknqg=@!xQ~<(GK>Vcy?IrWk%+573a_hlUGva8Bxd@UQTT?%7xd}{o(t~x83G%Z z8|~-hEG%2?GovCyR_C_+)0G;rH;szZytA!}5Q&P*YBZ{Yq;G*`0t0s@ZM?6@e*#tX_^!2~9w zHMM<=ldth~Ja;jj5B=5+I=Y9C&baqoM|}eV7YIEiBhtPH554sgA!m3bea_~%r%6z0 zO{l$fnMcYzd(3J!vAH0_e@_k2O>uWR=ZAgF1G$9O_ReABIi{qNBnb`g*C< zD@$D(zUS}B?7-IJn7`#qa#vlR9`JY^aHpynpUSaxDLD;pbX@Q-JKiW=8Rt^AoUI{F z^Ek_TN^n#tQ9GrY^~vStBZElTofy13WI@lg7@`_#anqFr-kAB;DY2THoo0*Y>ty-7 z3xz-6+dF>`(k$`V*Kw<*cq~piZbzF~^w6{OrDV?IOc9ic^~o$Ic2O_5-#$doQ6CT9 zd=vcmwbb6aKR%7OgZY|qeJ~|Xgxk&z1H5zE&&4B%%VJIqcs^z!?YPH-3%SS4+UI%* zpZro8n)9-Sr)l1n7(=R3R1W*Kv9?i12u$wdlR2#0+~XEzR-(J6+QWF(E}rj=P9g!y z2HaHkaT+QIKhJQw1C=9Cc%!fX$!>Fm4n!?ZwwuwIoeT``jXHdVg00W+q?hZ{2tC;D zI01*}K-;M_TVNwNsM8>LsR4^#GhF$ccTtJQcK>C^DutBBrqjp0YoEpxS;?i-K!=rH>dt0dQ;WpyJRQvE zE;ao7&G!+mjMZZQTqL>Y>0S=U4g<-o+}~dUIXr&j;&g19+T9LC5TE;XeT<7`sC+8b z!DPU|Vh^kRv|XI(Lxg8%F6Mj;ff$Q&#{=nK>31*cOZB>>&A3weyUsSRD_hLy*xLQ; z=Z-0>t|n0!$_F&6I@BvC%&fkvBVaD7p+EgP>`_L*`wjGZy04!rQilVlA%C$G9V@60 zyWN@pLdPm?a{4;pFpMf`!a_Uw?6y^1<`oFLYjLZ%@oU2cDkUuilKxHCXzEn8?K-_P z<7Y9U<)rcW3%Wf_{850odO@A77%Td#yR36LHzgmu9;( zOlqm;a!8ttWR5FLG~u85YDf)3Sk`Y?wa)hEh&>0c6ihkbX8o%XK+c!C0lsIOl!QIp zR?SrN9nrP6T7J|~1?Gm8uncif#Mxi7lVxYOp* zk@2)EUZGE}+dsGG4ri>g88f{U>TI)R@94$lTy~w?Kh*P#Vak%IRVf7#>WLgKcNUbc zlI@WwC*znEpSetB&}I9)e=TIrllgsnc 
zl^r3bS-RMasX*76YA{yf4B&Y6zSNTaZkLI?1*H~?g1y&vLwRoj!Z!Tl`TN_hur$;F z_5eCR!YUxOTqu}0?-F>1x^wHdr)5vee0~;2z`dq~%51OWd`XV^;9jVlD$L$VaBIwJ znYachfSAkkjhB}m7Y@|^@ZqbAij!)*Kzb_%Uk3^jGV&NdoKUmNX<}%hl3=Hh?}xN{ z%va$u{T}5g-~3z)ICdVAUL)8}tk;Q(<|!Txx)R2}J4H)1xG&PodU#g9^Kqs{3l;@5*b^wxo-zVgjf=H(kFWFm`gK!jw%~1O*OdZxH}<=0RE-pK z;M_fBTHeP^&38<~xNN3tjD}sIFBu9o7P5VZlS-q-Ly#QboynIkxTjU`zJHC3f`WB( zV9t%R>}3E>9ne#O_F_I_+>=h_$(l#(f&#@D;sLlhXbysN`N^C%qTo1fj1ED&YWFRi z9NDI1*U~-*$rb)^zLi=bU1Z0k*Lx~d7U=vMELIepiVGTZ6XRK1VCvA9a5mT=4DP^l zY=kI^(5M2n@(q`B59a!y0umyR@9(@EG zUxa??v%5I>@a9ayZD={M=_T7F-ivi89ykXRVC2(m*K0plr-%rb~ zbc+IA-JC0)%x%6(acG9qng^F8ub{3M@=NyK>B?<`>MvuJMqs|?m3LD!bl-TCXB+2B znq^yI&rDtBv=&WfYa;x;KGV9~Umxh*`eI4MP{(A-XNk+}Xz4LBkc4&x>b{z6zW3_V z4)`Dqk$(49UJMC32faIN@Vxg3W1B@X9V^(>#@McMCA5L|W@Ts88lm~A1kWwPz+2(y z69hB(9>_Lns)eb#bk|Ku8OZsy&LE~roh#-Rz*V}qe6e_ag}#B^?qo}7yCD-r5rS7g z24q`MV30A1H@^FO#fD4nJ_x_HZ~rV+riEyT8g~#nnb#ncGC36%WWpPMNT}d?;9^_N zPazX?y3JmPwQbSy!M0kj_xeS@Oez<&Ny3U<2~wC+AQWUsu>T<2g_iW6HfQF)B1vI1 zh*vtF@u)XyZJ>Jokh{j+9C^Ka36Fk^fE+ ziRJeC{JB#+oT*3{!6^reOezn70fFP=G8v)G*6q`fXZ z7y6LjVUZUc8Z!*V^Mj?m*6o6ce5>w^Fy7~OhyDJLF4e4n#5PW&GrL@kY~|q7erY#a z(yC#r=6ZNI_G@=F?IyRW4E5emv0k^I=WH8&0)kpABwrf&M(< zkR%$bE_>JzSek;bDG=ofvKQh11B(LOjQZBe!{L zC$m>$UU_)VR9jQcOXTIIBbkHuVq$AqvkS-Ne7}qO14J?m7?<=*0RqP6ERRW|FdJ^G@; z2TVK_W()rW?zX*yCiFPkp8{yuOqNv@ z?D}E@?$?3i?BBILbUdD5?zz1XsJ3E-=yiqm(TFQLGVv%3B(X+(6AH@f&m_;1S-2}( zUWk!avNBN{FHqwh^v|6@#vMI=b*}s#jZnWIdruV_oj9G)L+49LO`s5{X#5*k0kBJW zJff_?@zS1D65CwH3tbE1pbXvzj&EI*$R%PWO~>+bJvV5X>@4TYl-9sHM5H32*2&AY zc2hQQ`l&q!s7ROEgo5lFC>`HCn|BR*L3haE$Dm-y`>f9Q$w-z&_QM*|0Pd%4?cYO^ z^m|A7p_DPZLvute$$YlL;Yz21Q?^{JmhVz9G)4leP$y+Ym_Tef!{NUY?Nm=}tH9s~81HcM@fc5l6dzjhA=au{9k z-l-x`=^PS-H3^g;3JT;~TiJ(I$-cI{5p@2xqDO!If?jh6TQApez*TE&4*4Y{8yr37 zdPe>A(5FrX3!2<}fcOlh!h#ei*Q z>Po_3e3eS?{@Xk5dA-BYl1-Kg~g%@2h13x0}N1 z1qE1bVUzKSz!p-=&M%%>edUAmbf?>6r|Z#H%8Kf;J`JIKY4I5~qJq-$a+;qkHH7baYuqQ1Da3_53|v=eM)n8D~2rAhHEY zS@j+*o7dTyJR(6BT%4qhmuyTy`L@`{s}oH_O`v&qb7>d10~@iqQe`o-`aR^O?y{Fw 
zql?mkWJmTT#3*1hps{*_?&Z3zR(+q>ojZB)G(x@GepqpQ!lSJ>1lZTkgK^&ao)_T9s*vM*K@?$^&m(i`7qlo&jB8;p=TPzRD*kRetqXtUOR*7nqHe?;!A3h743_5Yil_xhC+hV1M8aHeE zQq=9#fACepD+SSCr?C0-ut+7cguvsLF-ynJa*C(d<}c2|gXn z{-8+wyLYT*@sMsdnXgWIX&8Efc(tuLX#~5sShtL=#=^rDkIqM+^w$Hz*_6Oxv7_ev zcTNb_SVJW!feZ&35&w%ZfbT6Sf-Rf|(W;T}fWPS8{bVW6L^V6BQ z+4x1QJERW!9Hhcp``pQM;~qDLu2N>97@E&_y9{8e2De@2Exo#uYR1QJ zAW2T}#1$IVMgvr*WiFL@BYR&FGC{?nb#lE4OP3-pEa zH6r8zrPt2UQ%C^vGguhiCVpmcCjPje|03xw6!z_pXFiO3 zP|7?Da79|X&5C@)V>)K5L#3Wsa`qzOc*4$PxFO-%vJp9q&CzzNI5&#N*@kXDgz!4Q zLLP$NsOBCMgF1zX@N8pSL6fFp#V%!jnjLzFw_>Mdoua%o3c7CeW1m1-@XYTd|9EH^ zpz`RA!&)#EK@W8UzFx2Urho2s#mVw3R-F8vssN-I1V*v)qi4Do2x+a1VqC%b-2=1s7vM=f79huTkWEh-(TQ` zGL3z=@){p95Pe;D;((GIdY%pDAUK@iyE=Gyc^WtD(}6JmblGBoPp5m9v%KKKc+dX1 zuf=4wFBBrfXXD1sW;!Vl=0_s*q?WMP;y1n#pdUpQ;UaIcY2{8iyt#vjmbyamRSv;t z67ra@npKBMwjNF~6S=&#N1r;DPUfKU55tRyq8{Rg1!KvVRxVRNyR_kr|L(Fe^nuf5 zC;o*fli^d9c^*!@?1RgZ`8nBjz)#zpY{=c#r(Xa8tj^I|kL`@><`O5zw=4J6Zn@f= zvgXgPh{;{6%uO2%upZ~%VQatX9(4kuCwwpdBvVSWaoRhUC3$vL$wS(ut5B+8+QKhk zpN$ZwUZw1uBruohJX!~+#?kZF8dU|-W6O>GX?#h8M6UZw?II_)f=6Ko2d|+Pu?lbB zF~?K$(J@jS?4-$_GYNgckk65Q(|EG^UYP_2Er1=Cl0iF$oO`f$`W8$7K2XAD^~8WP z*CUQWE3DcUzI^E%GrD7&6-S00=Erwd7$RAg#j6eAgJU*swm z)nL%XbH`^JY5de%M{I8s=Cvz(A-crz>MaFmorOAruq4H zWba7@P*`3*E&3Ju3FwlBPx>(*3`X638hyt7iYihV(bJ$e`bEb>seROKHqf>0-i@k*oy$fle=Afo{YHe|%zGM;!=v{5t-|MqTPlSqv!t%t}vGu_c`OhI{6X1%p z?q%kaDl`n~_!C7L4)}AEO^TzX9fi;N0qR&j+kp-P=&2$puWr<=m)m^*!Z3@LlmaljmV_`39)IhGcYSC}LV($b!G+;e#tY)H|F;hJ8RfYrVY7MJabE_s|3}6Ri8nt#eum?eZQwiPAM}4A1 z#exa({48;C-9_*#JoeIoD_XdaNxfq{{9;6MFLs->tOi6e0H1#COP>m3e0c<21(KDB zgU{*L4JKvKq0^@?JxUD?{(deD+5?y;#&`(7wj#XyaRak;+|E=S2QS#GP8Z}JDdC*@ zX40i*&NmW>+nwqkIv=gZI-v08fH5i7Gr<8=hYs{?l7D01kL!SIaaf<%(8l@O7`Nby zz8@VSJ(@Vu3q0!uPEDXNfTY9%L8EBCG94KU3(I^lIW4UsP-$5NPBG~wZZI(&`3Vy_RVhl5%j({x%L>C z2M^Cq75dQ51Zs}gXSO@7T5$sfiKJ@(jV_|+p8Jg zM>%C%g2X+<#6i4;`vxJn=Mo5nJJK4hANP8>Hr=u>uE>5X7(zT-n_1GdY8Ql6- zX3$r&Uk9B5q4bc3&K}twfBLx?fl&{5htSsD^@@LvvPS~T9V*frh#rT2@&#ffN=#Z8 
z6YGZaPQUnwZ=2OL34ALSwfrqp!a-^wjn3@vvj9di#Yol4^v^D9F7o05h8qzP@p-MC zn6l*zL(G@8z+#2VvAv$CZ2i#}QU=HW_#X!{o;D0BUT9w4#Dk%W_hUB$gdk;}1hAJ~ESeRc&*v3D5WLd4timh8H z=J}{fUlLpIbpgPbH4&igpAN-^B(e34C}~)|gh75g!O$E!Hldv>?~HQ{u{v5mQKP3_ zV^AeqaOZyuansg0m+^}#E%qLbb85VyN2fB9ldy8Kxa zgZ=s2BYE%ybn$EZ_#Ug75D0uB0sj<_q>#T#jH>4 z>Z+9spF(eH6{;oTG_Qtb0yTwsXmpASMr*1Qj}|2?#y0uimlQ0N^Z?{F0t!K-ine>7 z5KmtWvp-q}t~d~!OR+lNhMemc9!UrFXm6gzlBdSd%1IuS&v5U1kfE;LRJOirsxCtA zGE~wItl^{mkS&uI;kd9(cZ@|lK6B&$mo`}QVLr5#si)4BxGIo?_94ZQCa?Pe}!cqUUfIi2@bR z($PBDEXAdFbn{4VpW);zJMA>$3udp_=ck*6x0=NN-3RYaBmyvWU%QVRe(EIIBB?j? zdhvxSq;i^4=0y}9j=cCeVf6(o6s=Hz=jD85MQT;;wqT|mLD}#=o21nUS$&a8h_OL0 zx=jMTmb&}ZDm$#+OO1u=vzHiVc*T@yJMW#fYO4{f%4PUGz-UD!ga*aU;|j6#*CMVa z7Sjz=&=SC>bOTMFbU^o8gkG0SxBtB>4fS|Z;kYl#ECV`)bSyr%0~JbDNEL5t{=wsG z!^OBrY9(qjO5JSUYe~wfI{SLsOV@-K+pn{ZhtCEfVucUKdOHIidoRNKs?4TaU5Nr6 ztS42M*-CweXgJPdOG4z^-{i72A57p%I^*Y&`krbx7f9Tp1^fxe=S za;93yW`Barvw5PXswiTXtYCS>hp5Nq;2sHep5s&1Y?`;QF!9d|lecEKm`K;M!Y>Z@ zk~qyA3gQ%-JuHf0)q~cgv^tm}s#~MPE_ExTCd?I1_Gh!4hy$pY{e-aUm3c9z;890) zLj{Nmarzs$dY99@Owow8+(f&bi<}@QL+T}-{f65-7PCVydf?L42pQPvk*xc8iq$|$ zE~fw^L!%kCLj8mA{gcz(>BrirfK<~MPb|>`iUer3fJV4P7GH$Jlt=5*D1M6;G~ z&0Ft8TAL~9B7{it91Ry2N(a?mSGLMvDCt|?amSpvySY04n6~1>nL4eoV3dhf_N_HURoG&f!-ic-oyw ziiBA950^Q?Z9RM(IX=!?^n`TlWfS;#8;ODUeV4a;ASTGSYjVu=mzmg8TGCqN6wlqEwv zqnC>gu_XP5Bf}D49EPIK@Ex6wmM1INiJp252V`-@G{y`%uHCP9x&?VTC{)-iq@m7W zk$&q)ChPh3aFtS!-^Y0n2p+`ZjI>JYS2$?=7Rgqyb2%+e(|1*Rg^7M$^yN&-P-)jMEZ ztW&VvPFjBwFIP7sM43Ops9stSp8SIsq>&37CoiblVqvL*`%Q1ve!!N*z?V+6)tf`> zk(Bx~)d7SoQfGUfncN525Bx#hCKFjK% zQ%W=D6G)s`-+by(Pp|j5{oLj8;;$G(;4M&75Q{qMEd4)N%~xQFHaCV+_9F;ayCy;P z0g1XcCOVynG4-zVxXFQVYtuzZcfQH0PvUpyyBlXtXKKq-yVi|`Ky=F0Ha{e^OPwcF zIvhr?6C3EJU#VAwkA;zn@HF-l3A>*;^XE&O)1XeT^=mE4S=`_Gj$E%W6dZj<-csLY zER0rE5JOr8XXi_%u)Q+`C+w@bx3pM6j~T9dqEb?$0LQzl>hJfL>U_*c{P<+OkYe!o z7t<&304@&`B`!B84DMs*I`Mb(-L9gnlfl7vY`vXGhoq?qe3s&+B-p_)hMO!=V$1yVQlvRPRSZ`&7D5aE5oQ%_b;76h9+dOB^6}tA5HV 
zBlt_Mz0sZWl%*4zc5*bE*W7zk_zT--3g)oPZaB%L?W>dfsG*P)dB{D@;z2UJj)Y6B!)NAd^+Tosgx1l6#Y$B^RE@i%@5)ALv?Fmyn~cQl^VxEdwXJhFZV zohbE5t8Yc`?_fBN^<&QeqB5>dPK13n_YsHn3hUO^3PJ42yi0zZt99@Ur>%aR_rs>h zE&@;WiEG;{+E#Fn_SUdeZye1^MUcVYd)4=2ujoVu(SKPvuY0(ooOpxtiD3RPs(aVf zh39d_e%*;kIHzxdutd5RkBVbzCQ_TkNo;O{j6F7MSs%&cZbDP_@ z^?tBsi8|sj8ZC6lxvbBnaS8Cw3!}Y`rG`W!ip&_4lUl`Fu$n-NBx>>%iO!go2>1Tlc*RQ zH9GVRaw!`rg26^HdGbc$)2*=xd2)&Qkv`RC4_{i<8quP=I7u!CH%%sk1B}m>CjQHp z`q|PcvAnKlh>eD4-?2mUyB)$`V@^J<$PE{o9+zrJ zAe9mR+;>#2R-rOFY65}oOxafD2c?Yxv_cMt2J;CggWrot|LtowKSmFicIBTo5sefO z^uWWSQt(f3>B#EMo&=3*dxTazF}TLeX`rL8Uh8&QcEjgTklKc&!;X<^q}LTPvCb7o zE2+IX#L*<(>G1P`d+%W}hV~Yd`XqjFV9bbQVkAsCdW?w35(%OVuLEg0&}md^4Y|;M|xf12P*49GfWap`p5S&&UT;0ZQAP&J&ubSeWJhY@@qQj70Rl@ygNIc7ax zvi(+6+PT>kp(p-sP#Smfvv<v!P*Xj@RGBIx^CnhQ!5^W zH(y1vEGIwYe;vsZ7wZ(qBzxy_56VsDc9d4Wsh4N7n%Aw2;gZ2|-fBu5XH)t4s6B01 z>z+0-nB6^m_68d?BJcX}mapw1V=TEAKsP#{`zc@!BH>R>#lK)Us>rV$W6}4{HzH&T z9=7@H08pU@{qYVD0oB4FKN*rTe%$s1dRk;Gg+Znw5Y^SrLm)m(+~(g-SM@pq>l`LY~wO5ZX6 z_C((GNiAAb=|Xw;9}@XJa|_g(9E+6X6yd$9F+7z<-6tT@`?A^}!I8~!!JK@r z;Ay(YzkyKeaGDrRry!5ZUa$*ZD%d9V9y4R>PIeBm-_7Q#1Skzz!mx(pybt}zcMh3X z?`z4(B`{*vq_!8Fv7G(2hZ`Pw_AxZyFWK0OMN!09UAdF>L@n^bPPe`jS@k45hu|@N z_U|!jT(JAlhKwe<1NSVgk;He6AiP-_FKT-NEiL>->QwO>8${UYnddSyq5t4bP@M{;s^DGykythqxKmh;B|O=msl@)KRv%7naEbO zEl)Z5Qln~b2tp2|zVq+65=kcqx~2&_f`OxCwq1a8KO{c_<-m8oHix>Iix~~7!#<^F z{H2PMrM6+_Go>1iSZ%{8)afZ4&g!9r{JF*D@4Xu$sbh>jng-WaZfC&q>h;eRz4*$5 zcDBcxQy)iXX4n(YNd0|qcY5Js59@nLz0+sgSbDXSA~f*Ff$ec21Zw^)UAhnbMm>>; z^?Oh16&4_lodd^*B(|SG(eoBhwicRQ2m40tDfL(GkZg#T69omeqa@;r7RKAAH@jso zfooF98|iW04T=$*_C+t3WlDQU$&=R-n?z|r$Ve2QR+>d>T9Qc-n~eC0CoDh8a=2{U zYSzkiBkMH%sUc*G+}?uZ=}YbI9UBV$x6+MP8gZAE&yz*h)HL^?vl&nWp1pCIymM`Ld8+IagJgz@;Q+an2dqtCW9Zsg|o= z>2t$rRy5eFk|}#vFV54>yXJajiL)s;p~mUy(i--^RRpN)uz}(NNrgOpJIiWWPqMNG zZU@No2W%f`Q$p^NGkAx*36e<6rGT@P zKS*nTcIm>uK>=BK+>YG?v=Pz^f9?w#XOmyc^dCmz4N*ADV~g2Hir1CqH<_E`wFJg< znyKzr)>wy6xgnX-bSS(-@^!qB;k+QK&@M}9iSj44{{hpE&%IwxWPlplxiJ*f_Z=ia 
z9N5nfEsa{YDSy&4|M?P-BJk}{Ac@u08}qopd~75`_j6U$4tQKq6v~p+Z`D%;=P9Y zKX2Y(f?GLrppWyNWg7~E-WSBu%{V+BeEt=U^pl@KIB=7vcwwTF(rn?Yu;TXn$YNF%AjiEqpOCU7Fm25rDj0g+*?-flb5j$Kc}1a=7afQEvZ_0e zbsHcywIZ1eo?IR@YTIa?y7tSgtWKY90|ydV&%G$>JP>%ZeVr%{3SOdL=XBj`C{QS^ zcs<|{*84e5)1b|l{R31S3U|&R7D+~bsy97Z>6+HBQ6i|4D|bskrCks%VxNrgX7q#5 zXk(#PexgVUvnlSuMIgk^`L4k^^Nss@#d#dPRxD6$oPS|Uo%D5!yTR%8T&ZP>E3cqd zSX2&dM~3=0kOV=Kb6Ug!iB9UQe7>@hSR7jjgqX?kZlO%W02>JfDXOY4sV7&#NWUFa zNS(2)%;~loFP-SUou`pqx1jbY8=Oq(|BUc?u%2FXNQ!wUa=v=jw+U;XD}-kR<2-EC z1Xv7$qRQk$7!1m)rg{1VrE^^Z-$9!h5Szjyu3N`3b-;raRJtxL8`vBSeP~Qkv9=sd zkJUc657t1Q(`>!aDYXf4(ZP#jHl21-w+A1s_I~YBOr7PN%irF8KgX}1s?lhefs1sl zpN$KN_etPXsKH=RS4~?;l{#+ge0=FXtB;v|$X^ULoSQxu|LSw8c@@0w2+gxEQbni`Dc%=luaDr!2G126;wx44J>h3buOI{RlbubhQkUdg-Q;=ehE;XNq;^%&JN}1gAFx>s5>R#&@4*1<@1`Y8& zxqWia0gnYF%4rDEsa*j%RLDj_R9k zG7dM0?^Rr}svg@Z8et{qDFMOV6XWfe*M1k4d$%oJG<@UP)U8F3mEBC2N^$!2)Bnfb zTSi6IwST~Zq#$L`si1WCz<`uANOzZ%NH<6~igb%~jC3~)%_yBi2}t+Q!+U(bIi*g*RBeqW;Xex@`)XU|E}xKbomw?u)?}5$FB%IY`rDvQP#^9T_Eg0 zo@q6C`k>A2%uUx`c${R>^tUGj)V(%{+8Pg4oTosq;gw6_aIG?TLyQG_AjA*aJ}j@f zNqh>NWm4Ph#;-YMY6)blDoE?W-|)aHC{-_jF>Z^3znH2CMtBnh;!%p0giO4*kRROi z#Ypcy8>`{89YytQ?5pXM@}CR?nK6rCllO?>9f2SJ_t)UMV4nm%Vvaj7rS z_Uz>ReVkQ`*Am!4#9N@RA;N##s#**QDiE}_X_pQ+?Vc1KabSSYt#tR5d8;>> zJCWX=cytO+lZ@6{4}jkks5^P)>b;o#=2VJHa+5b(o?LVS@zPq(fV7;J!44i!*!kDf zZedGWHRonu+`oq=*}Ov9T`jD3E55l-+-&`ErVU)@DxBQ_>V{{LpCEgv_29mxg;6Bb z{oX=#J1c(ymH$|x8`7~Fnlu*+v|{m@Z)z!%z(x)O@wD4(9CiC$%xHnTVGpnL*%w6y z4nLk+w9~RtM`D#^GF!RHQ@ncff&KcS>!q$KP!zcF- zBufL+nXM+N8Fp7mp)dBu=Xi=2OG}PKJAvR#1W}0#siEzb)>$Q;>o~FGZ)x8cjXD;s zP42U}mGA9f%My=gf=JqW7bvwoBQ0^)b-yF5kZbG%f&kp&vr8P{{=nMcM@N$suDw;(}!bsIx8#gX|4(M(})d88x>z&J@ za|MA16C94=7CUL?eX%ZhND^9 z7V>_WK?QG$*|^Zw$P?h5L9%V(FC!?{Qzsg9yhOO;w$q@!B(S_)8IA8 zrD3b6D>@h%D1fKTMuJvtIM~bzV!6V<*Z(GJ6YZ8t(6Y>>ws-z?jRz$L#RJn1sc|aY zYLnOW-oF(3)W}2KpPOe}1Pvk??!)f3Q?^8iY%9ENd^+_vq6p06z~(@)FNb0h`b=iw z;*W5 z<1M%HJnP^d(n=TFW0&W7JJpc2FkSWWtTTp>AYu2+aFGS}X{&l;3*X6c@7x2)h<`M! 
zx0o`hRZ6145w0#|n0y?~5n62Z`X0(kp>Okb`3~Q;neHofj!*$@H;FP{GiZWk%=~E? zrQADCYGmSPv5gTLInec~*;JqCcJ2}keEroG3+-8Rr?>UO$jn;;Be>t16tOtAEA58( zy8!W=lwve~$Eh5Lhc+sZQ2MM~LmS@3hjhi2t=C_AK-H8+Lsr?_%XC>QAdOP>_&K=_ zPPYy|5qyhIs`L$zlL3?mx-(J2#;dL%=`ka|%c%ddojz-&C1oRKE1d%iM7&TCvO~## zhy4>NPQf>&r1*$$m2{9M7Bt=%6sfAI5|b0dz9TE}7jQ)MxJ?tn{chZIOct)Zf%#Q$u2VTZ2c+LH|199JP-wC@-i+7Y20 zJ}`0WWp+5aUxC!KggcQ>s-Kk)%76aPBn-%m2;5PB3`#A`N7g?*)N(c38qeo0r5YmL zHp8PLg8O0V+uO~8ONUE9Q-;0L)xN>n!V_}Bi9g0uM5N;wqjre0)UYmYR!=h5Z|l8v z)h-w2d1qIb88MuCiS>|<>KS}K3Z5sSonh7|3}@vV1dkU7I6|kXx#*Ko0%$`FSi!O*|}Q>pO=H2!CYLnlQ#!fmaJYM!L;)TxK>=6Kw_#p{J7 zZk*C*!h`_Q{#!s#6XNQLI8SB_E}UUzLKpzuYkcL*O=7u++LLc>H7BX@shkMINAOx8 zG-agD+hMcQ{TqumgBeNrpS5Fm2lh3;iHVkC4WMSQ__u*HIa|bnOc>Q9TeH7UjroJj zguhVu>j!o?#>GUu_h*UcG&T)fr96rzI?BW~^(*YlTMM2$C{^=K`m1j8Pvqi4n&wKr zv;GSE4UUsfv_TUo&WPQ7GS=>{s3NS5di6tUWVQ1&l`S#XaB8u;=E?{FV>)U}g*jLr zRXJ21>i^;}u ziV(^}XBiqZit(Hg=g6`YM8GYBahn=HYNJ0^UHfqXfv};)w;L^b8|$*3g*W+;x@*H* z`fW*jv&~B-x?em9Z5Msf4^*N}!Y>bRKI9xAMY{m-Jjg*w#<*be{jslT%?HrxSi8l!^yWj;n z^>>^omiaYnREmxdl1OFn}nI_wKLJ|Y&qG&Q!HJVV~cjJr^0 zkCzK?uC_h`Gi}TJTAi336&2@zsi0}D_Cm*e1a8Nxu}B( zDxy6?0^^&fMs*F* zg@OW~8mz6xzh-`@a(^V@$Nufb;-S$3zVkw;SfGu~;ZA z74VLl8I#$1{P~=F>-UGqJ9V z)D$QWS05L2Dru@Mn-E|VrW1g_iI0Wq@|J8fwp47M`!W~UA`rOQwIN*h?ox1Xd#E3C z817Lma8h>r{!s?veDsQMgL8>hmZTG#ZfbEhEW1-!f@)! 
z_6%G@9j%GmV~rm7x6cZ=u!eshV-};yH&?EE#tW3#veTqamCeTxVa6|3wq`{y^yvv(^xP@1J1%xZx?c*=cljbXtuPc zgM?U6AJ-rHGn#!gqL2Vh8h`ZyVXW7-iQfYmJ>ekVU25(k$84ssoJ%M{Gd7iqHeGxh z`%~&y7^3g49^Q-!iS*E;4MA%dox@&C4|0~xfWHw$Uw78Anif&E4w)}R zu|;I_da$`#FKJS#rk){6ip})u_dh*M{}Pn4CUKUh+@1sj`_0ogO4zjq*3)rqn&>WK ztgmC}IdkWV{h(6?_F$$Y_B?bREPMjVXiN%3q6W%Pki8rGpq97j=E-T@3@Gr!B24cG zd{30Gm6L?ymyA3phy(`(9Uj&AeqBZF`jzW$6TLqq{v$Z%WJ)q)=*Ulk!Qh5K3lC+L z-*L8LDN5Q{2BFqN>Ha-kvg>H4I?tEdc5zoR}XgMWz-je;br=@ z@Z$_AzBl1&{h?mW!=f=yvbs1)L0U0k#=79u*U$-u^Z#1YI~J8_RDq0|P_DQ^1E|{k zFw}niG(c>(lGOAnB~H$V!c;Db(?r5=L7A|9H>Xm!HD;ne2%NhL^o!!dbn8U=FIo|_m5K*LbWA2JKEwL zo@O{tS@{9n-{a*-TI=^K4@A+sY8tK%#smb*=gIxzUoW!&cO8S>z?{2;8M9k47aphg z1QULDfkhn%rCXVt-Cm}GN2a%ELp1tyq#v4daRaXlk%BVb-y06X&b8;S({*|euMVe~ zG{Q%b0`C=S_*G`Beq>n?C@RLEI5E*Q6Y8#+T`^o4jV4LGOOkV28F>c*Dv+B*^&@(V z7i@9S(b>hG*qnU#Ghf+~kRRdBKbhn)X(rAr4x%^R?oH0cciv!i0%3xL=Id?kGrg{3 zMW&h{%n0|@&*?|)8L|HA}%)KzIRyf)pmVN4+>M)sOsF-}MzXoTTLI7EvzypN~fvb`w zmA&FqpxmW9^9`@5(e#6Ox7hD)l~mT4hqrz;gM+_F zo3W|ajr>nqqpv($M)`=-Ic(-m>T3F==QiUnPpDmCZNG#i(SK#pOMEUQEg);02eQ4g zFI^$WdOK|uG!AJ;blhKPR@EAmIh? 
z@PJl@?h9u1P)(8ovNe3}{ipNZbFHHP?f9{*L>urkp)JjOQIa!cTwlKr51&c7%vY!m zCpe#48d)OxzTmY5uLZu^PrT9wj`T$S#MCOQ*qym`r^>!KaY57*@c5-FDPj#aca3#a zw_Thfox+qR-5WjIWS`7(ZIwUyR8;aUBkaCAvsy_?Y<39J=hVi(Q3gFnrV78&`AXR>PZy_gFpg>wXJ^P0D36A*LeNOYe`9^QZC`ownM@Z{!#3)->%6q__ ze{%dO?a!P84h3c&D!jl9{JziZT5`QqQCEjZujTWx9K+Ay(`B;0nX`~Ly;WEDDKKmIa=prUNxRdH!q!mGPod2v{dyomr0Yy0|xcB~t$aUUkKaR0s+Bt$tQ6 zlrdo`LB_~*dSPCnK$5B(vnGQyjMht%JtNlxi;3I&mz zPpUUhx2km~UM00+`z!D_>n9z?Zo_L6QRzC&*2T^a-Yr-dM4`21D zDTC;i9^UEjx7+tQ03;hK&tHD+L|8NPMQ@5*q69SId=}Fa64|?&f?}1UaA}2|6u!Bg zd@FQ#!nfZbarQVT>t_$ms$ko@jBWI0Ts3QeXl15InpOX8F5`%f9;ka?Rq9mY1nHlB5wie zg)LTD4bj=_T_CXDuu^aXd zu~E91c=37Z@nvnWyCi?_12~;!Q*kRwcr?~4WM?x3t@rxD1N#GfLp52HFNFoGKGKIh zUWtS2UpH|K%1G>AR+Eb=bWUioi(Xe+wgR}k*A-EnQ%uUMEeC=Vnx~{e?)pCD4Kz4~ z{K{Yt6jQsUr4lC}_g>rU-%;(`mj_bsxa;ISxiID(dS_Z77SBA@Exc;Mde<5X9b#qH zA3q!;=S3*9AA$5TsT^Ohrt{){waJbRlv%_)XkoG3uDllSpTSYBiaIU};W%&@(9oAO zDpL*8ZKC8}pYg-3sFv*(8k4#;p=)CW#BP3>blMTd9GdW7XDvv_@LN)j%s?$O%JrD& z%4H;+-*weFViV&aJXDUtZJQd30{xQESF@GOeqf`PEdNJ^7)#8t|BGc{ujRW~G2od} zPjA8JG(wxhZmPAb){7h^!YEh zI$iWTVXIPqO)2>0q71Y!$eCD5c)!2kIZ%M{0;6bL#)P8zy&e_b!N6LdauJE$$4^48 z;a=g~f=cyrMWbtz?@oVVCswV3Z$CspoIdO{KQVhm%0+m9r-M!C0%pU20W4p<{zPW2 z(}v!mNgJQObqY>OZ&`ZQ2(KjA>#I|nM3PK-<&UvjB>v|s$ZVC0kwdrcYpjEwG951A zNM~eSruOz)JG%#Xr5%R=Xggl~iQ%$;@7+U>zOFUGHL_V>PISt<#p6C7Pvz@zs-3^t zXTQbEdrJ3?R)}Z2bQ;fz?+)VP1&b5!CodAU23WR}RQyfW{SV6X?=p#)9U5NItn5V) zPX?>;ib*c&X81zmdjTprlo3K4oWJN&e=&ppbwCp-<`Vym=wHi@;R`s=O#KEMa4RG|28f4$@X9DD)XBcd!jdH-u}{rla& z|MXP5|Hw^1eeW! 
zlRCdm9gUL%coRhHuFusAH(4+A{4Q_teXqzSnTH<5iF~FpNac2nC^M`!%x?CcRw5$$ z#3X)Y0^T|!=l_{F4gSMe?$IRPYQVlEX?giaAgi8x`1_PkfY}{j<|If|qYMx=KiUsEnpXHjPvarK4G4QYoO|KKrh=PQ!6z}`Jbl*)L9*f+Jd@vTRtUc(T{;K?CW|8|7;1fNhrvpq+s?aV1#HEf z#rv%Jo(W!`0Qt99E!sP7rkASLV%hlXp=lK<42s+LbQ_%U0dh-3%bgp*WZWnV4zVP# z8~?wL|32Pt=4I@)kCp*_LC+ZKi+933k_J|u_5HkvNcaTQPzOHIC#XYoW#y8YHhy_lc@E)Sde1&s>@nM; zj(6T19U7e6u}FLn9GhZVuE(25cCMN9OeSuqW~+Ln`1JjB*6ne3xY4N>5X7}YGv@#v z9gC`3HeM-TJUWL}tlcMN1Br574SIt)9yWU+`{fHM`J|nlrlMF_c|o4WOD)|Jjq+dW zA~{@W+C~l5jf26B@&ubNLz=S6A`S8*EzseL@D{g?(K|i2gW1v#RbjI8QzNIhQn(eh zJNyH{4)ay!yzuM{I>mVH>+wxd9{Zun$>HtMHizRjc84(_kO$^#n zi;y+x+>85)oq(Lsd$>J0cC=e(zr^~x+207$s6pi80eU0&!kauVK<*5f6STqPwImk{ zqX<87f)tKR)^;X~E2bCYuw62m>x1mCfri27RE(N6@fstePt9(w00dEIp#tJuPu6W> zLVD_9r_x?v{yKY9Xw(Ls${}2}-fIK{ko5=a1NLGsDmu6|zrI5$l%N>mfNqQ!(Ou(H zjlpyZ6jmbr*^1qi(QTXZZ#zPw@}LZZOj;~20O4BALEV%;HCSUb8w{yW=P6-sbe8IU z$+O0zxIHn3T$$4JHTS>0oQ`Et0I>%ynz0-Ap`&Bpa#C%xW4xu%#`Mw= zXV6h_?h*b4K<^&}OnM_90~E)hfz;|veW)JNr?It$$k~Ylj)`nz{x=h<pHy(d4m)G0aRaONQ0IW}*8<=y zCjP1$ecN5Ot3loUGAteePiOa3z+d?06y;hm$6G3!jqE<<5pV{f@y>A4KW#Y279Ue2 zHFa;a67A{pyb}joS9aBe(O38;644aA6yN`dskap;d+k>pyK`mv>s?}vaUtWFyw6RA zVY4a$F&oKI#^J{(ZD#_RT?{wo$Ib8PzqMRzD$7*jcWnUFU)M~E+3LY}uGG7swuimD z=earwS5&BEk~x7Aq4Mg+SHGSrQq!upAm#YmC2P9F;&jd-3dKZRHiNiv-nF*pqT-=L z0DYF3vsU&r!rpe7&-gHos_&KY2Lk>R$mQ{GkUPjr)d@bFi;Q)4&KMu_W4%0K`~hXi znVscxoD=(9;$UM6gpur1qt&)=uzmWSY-wb4HFQ@JaP5U2wOkxL8I!gMJnJohrM!6b z#I`x`$<;kg0QqCauW?`l_^;ne%wb7V&>JpsM~FJA{Ss{Hr2nw-CVP@g&)`1p>UsiY z^_J_68I&>D!5zgukZ9AGz#Nv@PP4|KG(NwPeV+-tb8NlzD^N{3el&c(ud*IgqX-iMAt=*t68Tx?X=_tdK-33rgIc|OqLO9b0xEi$1Af5XsZ zVMR2xGAI<8!8yHVnDQu^f_qBnsf9q>>)e~GE5*zusN$5No+Y`we_!M6jM(%Mt{C7D zyFwbsVS(!t_LA_|2St7EYnrsyrDjN!N9a+Ulc|7W21sIeR(+%ru^#C^-4t$BKuy97 zrO}Q>dS=W{41JM$AUDahekOD!_``m|sLe-8L|zq@oJj2Gk%z&7S6M&vRIK9?DZ z@WySjmIym^*>n0|1Rw4!?1W(yS6VrxG|)H&l>X0%s0SxdaxE~FF_FVZ?96LRM`Uq` zpi}+1M@>f?snDyot5ZtZf)+83R!p!$*wt~tuaMynisaLiu6uVK&BIx_)z!z=9iJth 
zahbf;J5atJy2K$q!(1FtE2LM0zVm)GB_ujLR9b98DGYkKf7Ec8djX`Zu8US#{s!zg zB~^wZ+10W2km0me74PX>+|>?wc9$P&oeGp5gT_syb`!9*+VjxcjFYD+(XS#7Z>G)dt$r_59HUqb~~51#(>S(6cMMJ?4J2lMghbY(fvhdWw3(*IGCZ? z#7;SjZ-~iDys|3FYDQHmLSuw%x_UdsCS0?V)lmE{%j=ks7zHa!(jeP&VS4}|1edv% ziCySG3WUfd^qXIypXj_|lB~g;m?<_`qf+a267*3&J%v-j+^3v&cF&2%m5kollVAJZ zJgzQD4mU#dDAHo0KZ8ovVjjFv0Ed`FgT5Dp7~=n=2B}Gdx{n&_l19jNzq}YcG_0P7 z3^so!xxz$Q3b%SG(^6Mq@Z~bC3CR+4B#O1s%vI1fU94dck)3!}eKE%ju+QO02bnUE z!cy?ct+RMU)4%n(e?=bQ?5Gj1#H#x^bn8v3A29h(24R|}x~fu_V^!_3sk~qE>IQaOrk?seJj^)h|9tZ6gIid~q)w;dbTo{f|PJaw6t6GV&oKuI# zK0$M2_ju6Bk@&5E*aVf1EVx_jVHP*X=?W?D`p5u{imTTRt?l&E5VCe^QHzXGl|vL4 z`=K#G8DcN)c(6JfllmutA%QTnVt|@7q5P<xtp&~JcXNL5nX;lyer&P3TJyer4C1VSha2|3+A%FOe8!~vol1rs%OL7x-d|p z9+@T@=^GwqjcfR2BM$BGO}SzDr`h$Y;P8z-k>xi(OrB{U1R1w6Q#RQ=S-~d^$Gprx zep#nHTfC<^zVb^H zsCbxAXjb+|Q-VBt%9_xE>K`~(1W;EwjcSbN-0=1^a>1>1s?8%guT247Z288=THq^l z3i76;)+{ksWri96oRm^qHu|%jDHTJN%fw#^EH4XB5xH^Dj{#lUJ-x4^Ciejcgv!_* zEBl6Wu^cps=feTCLuRQeR=LoUYe{VTp+%)yo(&a~sDi^J-spK0^H@(0zwSZycG z9X5oCI#gMXK3?NC33m6eT-9+q$J)E;d3LR8Z#rrQ2Sir7-t;gg*1Bi2 zG&91@>c^=yZ|t+r{v7cR@<2)2oZOQ`Eb)6DB%5#chu zDw4S_R=*Y79m}^?Zoz$@{CZilm03Ac^cl53 zUq@Q#zF+y_VCxsStL1j-wV%VEwNEjQe<&=7V-!65doUoud%|uum9}S1F zVZTn87~uqoG^=;rVWV$z__)WEz>G`1M(ul58yjM^8nZ=tbP;ze+O5W<>~LUc9X8?1 z^VHJJanNBiUxaUA<(Uj6H+ve=n>>!xZh&&G<}K6uU?NXgPlL-~O4QNC9zM8{BFv^> zCxV6Fr#@|cHUX;rG-awnn$12=?wA9^gdH}@vyN}9}iZo z`+4w$>$uP4G z*0-@UiSiK8fo$ytAa1qmm8;(`5mV12;`}^{JaC%!_ZBYtdFqTjwR+9H5rTcsqqazP ztE0dO5_zS**}|Wl>Fl%*$8iY{soE_ScWelsI(?(z>6$7j&g`G?WM%>U9vZYUKLTgtcwy1|0N#uOyS`=PcHS5kv_d}eJK6Py^H;K#cOIM{ zfPZO-b@PaLhsyb~P4Tf6rT8O7A2m_3SQSOr>PEk6_Hgfj-Uz6+>d6V17LW={oW}8r zQW25Sqz3cPf3qpAH{eXd-0>x#K9IZl__HI%BlZxvcLFc992VP=g`Nw~&hh`bi8!#M z4kUDKt(v{v1EhYko)IQqUl!vP57^{G2g>K&=1NoM;RWw08&@V#AkWMXovU4+epEy! 
zb1o%P(fMjY_O&+f!TL~2I=AHr|MVWSs;ZX7_G#lFvtzPMbdIT%qmr^-qgU&SxE1*6 zJOLg7ISYXVu8=}z2<_S?lf1O7x3|Ais?Af43hhz-xVXepxzDo|EyE0Vyz?3vfdM}% zT%Da?rq?_0PfN6ZU@4koP)=thDNUyoa#5cuA@SvSH?704xb4*pTik|fk%EM36*D=f zcRVY!8=;3>ldk3R3XKy^Jx`0YL#AvZro6Pn+=+T#wt#mr9LE6fA?_&ewwtmV^;8Fw zlrI7dS*>Z2PsQn?ezjZK%~cuDvkF?rjx<$C^;$||OaY5HutT>qKYQk=bbop?zn9xi zt3n&90qSn!wdFSz{}W&cjDPf+2$f9KGA#DS@bJg&&{syhmno4Jw?#ELI8(dcfST() zMI7qhz7x zh9u!S7OrklDt}uL{}so#5$3uWtPPNYdmcwD$AoLUxJe}SIz2*uBTkCsTc`#`>;>f~-p(Y?q z6nyDYbX?Xdm``Cr0$ZjvGUKJenYyMSIT(y4bx^v6S31|P`IODto$FO$>oJdJRriPM zS+iDGtl{IR6^wpF*={9C_~s}Q`hWb1mt({}rF2DXDd_s73j0^;x48_2D?iPx`UQA6 zDI!%@qqDp)I|r_gLLR{~nZu092t*qQciyHDeR{t+$Vk98m>h+OEC!?{v|ZxXUU%K% z@3k_8(W!63BWiU#gNIdbF@*2y3-T)B>d^dPGp*Nv=&BNkwE*b{^z0SG$n=EE*g2TY z&ZAe1K)dp7%tVEbB4F6@#nfX@s}Dl#QD)s-s8d561NZycSvPjwii9#qLJb|y6NNSE zWIc{4z<#eRhYe`}o#f=?ech=dLs{(TW^?2h3#h=-t#M9GYkexsIQ8hmO#$1{SJiv_ z22_u1cX^y;u(V2wQrwoqj+T8eflfgaMVhiPoRG-OdgZCI`Cls`TKVro2x6htHu{@d z-Y$vf6TX-SFTM%o27hhdo_LHl4|dxProGwP3hgJkTD~m@k*n&pEo{aHtybQnQ!NZT zNO0`Cu{(|Qc1&+_e5+o{()iwzKdT*lm$tRdX-TmcDXnDC;o zsZf~=TPmqYz;cdk62dw)oHL|(V+(BwGM@V4DUrwY;RHO{7=+tG`^rDBXBkO-&d24v zUH7W7rBFwLH~%yxbs2Rn;8%#v7(ssR3rbU33^D;Q3GghoFdyxc^V6nnIQlCQo1ZAA zG9`NPYw#gcP;wt639`h;^ zfJ-}ZFiBtDuMMMq%iQ<*S5R>d0h2$)UAnCrrN%w~o{A{PZp_Qy`Le)`wMz{C+na^m zTuj}*>o7Mw*G#Xr)99wr?ikwgH);+?)#PXaR3%yU3elB|n+U|JcXI*J>pJGRJajp` zkiEH9*o96XN??)c<=FB0C;zhE4WSD9;B|K}iLnAXR3xUs!a=jen${+-N8g?xOZx5| z_pckZOlpL&4#48(wfI)nNp*hIfZaZ17AKm6kAGQ}u#KaZd!}v>Zn+^Wb7p8rk6K(* z;N~s7G=o7BeJ;)cPb856WVR!=VE0ZUvtK4A1Nn-j%`s=!^L6p!M$uf72b99+SblCN z9xs6H=#Jx^0=-pEfMrmR;va}?Hum%LMoUyJA9c?P7we_Dj4^9UJdoZpD?&D#|7*&X zv0Jsb9^c0>jSVRe7waJ6C_WF*-#8Lb%`aj**5ac1(O`U}uU-%Yde^LqS@VcJ@ zM6wPJX{XU(l2YEBa6Kc6-nVs*JM}mseHK)~cBj}Y5|m7h+ehaI?C1F!OWg!VHSVRY zhA-=mC|nQbW<3l!73;M>et1*h;`Hpkh1OXg%I>&Y`~5v|1@LsWG8?HGCf6R6y{&>? 
z?k>2hJr7FWZEiKv9Jl2bR(gVaC>nTpFcOa@xnve@yS3N;5FYyngAu@BwXvV8Rpx+# zOm)r~x?FLIoj!4B;J`@)8JSKj}=aY{6)u7;K?F-n(YM=s|)z0#g0G+Jr1r!-8D;3^7Qs~ zf|tv@_o4k%k@&_Q8P6Gn^Gp{hHK-V^Ml%vKN2x&9GY==Etr_EYzYt!~G8FbW#l;6Cc4XkYw=t#v!)YFpz?yG=?4kGJT;n30DfZ zr&>N1lkzSw!&ZL@v(rOs`+B)aKY4e1t`R%<5M!a$yIqdV2^ioN&yArosrk(b@7H~w zaDxfs{tuOb;Prb{GIsLcrP_rOw0MoNd@ z8Nib}98qQIFw&N+dhh=OUXCEV*xFo@>_crX0#(GTckEl#8n3)j0|oVc9Su+IQA+Io zFXw5!lY61|~KS`@p=oJ|?s)~pP-VXpa7UpMj52;9< zKYH@SGeYX=gtH#~Sz8WHyI1%wG>zk*tTt-wn^G=FED0~))LdLiS-tyYIjOCP=3ix_ zzYehQ0LPuqm43~C+`vn;_%r9Z=tiiz;`4`rwBP&;6YbAqg^x>EDr@N-v-HKcqGEm{ z`~UrO0r()MIc8`_{T`sz%zIU@=^oj7cxbnrkKp~8z6YQ={I(5FK%f4rF!|>}hd51R zUNHKf=dS`zu&VpxWJ&;l zo}r-q|NamE8-{;;YyTUD{|&?6zIgwe4gZ@B|CV{Y zAc&()eMU#IKJ_8DTe^RL(Ys7)tV~DEW}zroMu&sntL-XO#(8TjKcRkUn|@_Q7PY=_ zrq!N8;GG!8Lu8kkJJ26teJxJ(@a2iS+0W!W^J@epRaook>PhIkRn`6&y`+xIlg;(T z^Q(U4HE45|*{zo=V@~5I6Waf?XNwhKJ;#>XT)1?updJ_TY9T2N;Io~g&^dz1;rju; zvAD$n8;F&qwRK{8wSs7>wbz5>ca#FZycBm)0qdw~%ED9@9WI$vHtmLY9|7}s6-P38 ztgJ(j-0sFfBLm_@w&>{0>x44(V&^$zQL9vQWJh%WuL$k$gFqDmW84}Q6^nF+z;p1$ z>*Gb$p0(+_aq$hLp2hmb(Hhle5$tlJc)Wg)5jx~qX4uLgeS{lSq+9#GB|0SYugK%? 
zg8+v3=b-PW#*Hn!SU>}h?I{65>d{r8SyYK81NGag2Usiybqh}FG;u<+4nFp>EM40Q5Uy;z^IjscDO$p1!A%J24!Kd^q3^NBZ3 z605Cd@mG-RT%A3uJHI_kv07m)K+@cq8v4Zv?!P)CH{YyS3^fjl&LxqFAnMIm%G5=9 zp=bT0Du&iL$DlbErcsLIjr>TlyrcSTPeZ@{h4Q6*+pSv-U}XP;gn9J04d(lU*7Bc$ z*YlnfCw9sERaS9Uh{-0`o!1BaM(0I^`~)?A6Ge_^1(gaP=|X^hp9!X^gLw;0xpB=d z0|q{J?nd5#!Jm*P+7*<<7~##<1fh<>{hEEc=g9D;L$E@pwQDZF(-pDX$mQ3?UIpZW zCx(NW%*ma3>cyYNzVwQ`(PdpMn@)EeYQL-Ca}Nm#87P8;%CouNWN*W{_OWry7PQV6 zr!{qJTlzpyd-PZE-#t34&RXo2+SCyH^Hz}oKx5KGpwXaQ7~WpK_^p?z;&UgUJCJ9~ ziJEkR!}*XvVvIBL2Hi4wyZ~uav6Bc;&idIO`6t?pTlOcF&ky?@`8;7a)Ti>u$>gzF zcpc9;H3Gc_iMQSPMfX)GWm56(ml8f}|K!+q3HO;2-MfnbEv$$;32!878FF2vayr*V ztM{?$QTgs2>hn5Gh-LY=rGI83^);XwR8AL_I1|>?gy`%zDT7Z=6E<2`IbS;b{p|cZ z+<%S*WFrr*h3IK!9~v6K+&o7gbDbhHo81^IZITtP_VDSUEwqbGqIvGU4Z#hET~UyJ zBeg=$iTP^x>}5=+U%kXlLqOZ&!x_g#?S@~Dp2x*(1p>y4n6&mAWwTIxx~2Y)5&#Y7 z%@otd)z-Mx*XtAZ-_1luxLQ#mh3HI=82s|uked6p)0$U4hZRetlC)So=4qFmL1CsP zmiyQH&78+m!Uw7I+MR-H6o=_t79~Kd1~KOhVA?Af{z8LIHRyWR87fjVbb7p; zEAe6Jl(hP1H7tzme6tz`+7ei5^*$H6l37}Nal@32cc^e^+zKgBZr7rr%9l-$cDG^U zw_A9=*xYU%>A4Y0&PaoGcwvdhQ>Y>gt!q1mp z0;=C#O}!-vhdz^zts*90vzf1G1-iZa-3#W4-kGU1Lm2ouwqNP8Lx8h!hDW(GJC0wT z5YM?cIyf#gs9qt+mz!XED}1gFZoo~|d!Hxcn&81S6);TRjV^d{YOIL`FtNN-hV))_5j8kKWghpOezbtk}_?HO17hWoi2c4(2BTmN+(BYPvH9lIR@Dh#|UP~BVF`MY&3qw5Jo{h<6qDqRkbrOrn%hr_b!d7xp=OjPuqcN)Jl zE4~ITsL!TbY(M6{)v9yH@%_9N7C*k2O?9hDBILqvblBedg{P|75)7wd zU{}g!ap-n$t2BB#EKWq6)Bcc6u9L4PZ_HLFH+%+4Rne}1EIIE_}56a1%I)0k<(|k$^5=`3S&Ad1 zbajx}ZQ(>O@6YvrsP(4oImZv++R!10*$y!B!SWY`KEuX|b6+!bhJmpE(l9L#t) zj$RU;%uelwt~X)*aBGTczrHzIYLbu09*lK9AOM_^d)e5~{46>f-1SeF&-EV~H$M;}f5AV^BH^14&A1 z-)D&L7hSy7`Uf`kXa5c7J%GW59!4T_VWCgvS$HFj#?th~=WODx*#51MmXmI~CoB*` zLRGQ5gT=^RV-9fUT^Y|5kpfOc?JC1cz%CUwj>CqgwKR;mHLKa<*kx^OC27;2!dY9@ zFv}dzje)&F_7-qrbac!(*Y%tezXtjMr$x7y4`ab5K5gft_Q!i1td}PdWLwuEuXL|- z0S;6T7>M>bjlT*Ad}G9d8zjCv^&6KNeaBQK+mB&9Ya95{pD(*O8_~%k%@YoN4~W+= z&?AHuqjqV+8<&35jQ8L?TfVKprJBE@uT!hn!BM*?@APw{D^{(YNFoyLTHqgI6?5&X zE(Fj|LdY#D_A}9;Z;4~u(@;)7ncrO<5-ot)+=w7vQ~BzHi%S(>ZFC1Rl`VB)i6VZJ 
z=+)z08}%eiU%EY1E5bl7eUP^0lN-c(Q2Vh;)B>2+hIa2>-W@39NxkfSU=l_6h1PN4 zs|T+z+Rg`kC#tvcRS4Adl)nuH6{iYfH%aoBH}IdFuTT!sRjUnM&8OlN?UhJT2gW_y z(WbP~7P;?tFlVVN&~}2}P!sB=E&jK6!SgCZQvqsRO($Q=92?;#GQ)m^hw* zw9g$(r_ytFQ90;b?nCk;1lpYL0jAclcrEU?Pa_|p-+O}By)+FU`N{m*VcRK^FkXbK zzuCr$YG2TA{7O!@0BU6xPNIZCfq=gv7C*7RhuYdTNUK?l#;(p0@L>53Uh}5}IES=_;He z-&uuK%guu#RyvFuv6#)!(gBP$Bw-pX^X0tV3l6q?2d#+*fZ_IC7!_m2*|Lvg>gb;v z*gLZ%0a~NoM>U+@&*>A9*@IcyUbjS-hXeq1mM!(IbA7Fum~u6uL!SMU>!BLp-4U(O zSSv58`0@~lQD=3mnt?n zSo!^gE+sT`mM?nWNSd_VU^}99o##iLZO56>6ho4(j53cz%2vEw#s^@XsT!xO8tsp< z#PCE-6*2^@W=ZQEHwC^rz8dm#(8d|tDkw@(*-)zbUY=>@F9?uWenk5ggV1$XwL+vakJ{>djswmfmk-++N--}qe;rwFHvM8BJeO<0l1M! zwXuAbNDPfgSpgNeBvs!H^-L#^8Db5FlQ@_h zMx@$r?;v+`XsU}k8P%suK1^@64a_iQEp?gT6;JA%_YWaD1=wTUjkVe4T{u0Erz2EsK2A$@s7<`IR-@>{s6+sP&$`O7#l*|4$mkmNs)avh@Su`Xuk}- z)%2<;Ujuq|VZrY@NXeBatTkb!LxZa3urCPbt-MT@*zWsDSZehv^@bnH$6JyA(+K$q zqeK3jNiqq2f*xNlVk_MB)fb67bMk~|I$|sSXc9}S5hiTPX}w`-;3*K>BN&qW@}fCJ zy>}#$%dq4=3*@titik8c{E87qwqK>jcwgb8a4hL++j3BxL$E7=|9(9Rl5^N;htol! 
zJsy_=nIRLOX6w=v9N|NtbG`vcmk#)WCy~+F%Mu4woxqd$@#O)XZpW)+M*xe=R^9ux z5;dpyLuo#)$#AoQT=i_N`EmqLt8T+7X^ibm(qU~E)|9RFMqi0y{a+&HKk1?eE&@5i zOO*Nh@a~CJTZ^4B^gZY%pDdBhBgeB%#pSwG7jV;KH>g$6(;X1XZwx!z6aZh|opY~Skxxk}eyUit_xhJ)m$6swa+Bk^{>2V4q3nBU zIFEY*c4L3}R$9r_#n&&*+C9J0X!9xxal*YOw8WaQ$#+ispP4^k^KC(Umd8kB8HDT& zRO*-Xes1sFoSt>+_j3hD1>-Tt-!aIa&I-5oxACPV=+DGVFgg?~?^E~!l#4#za-{tWQ~dIRKW;Y@}jrsODP>{s{aOQ0_11I=3V=4KVC z+I44@k0Wn&CVybvi4lM%4*Fq9Ccut{;_4Ld2iK`2K9xkZqG-y=S}v^xT{^9^0*Ic& z5`}))waGE!ik7uh{+d2uWKUQDi-dSy1DyxOYUE`9dWTG=+}gHx46szf4gcwDRC5J+ zS+d(Bo)jWLJQLaz{4m~NRH9hA{nhsN<}DG6dV^gYJvTAAeV!{r zm(6XKg*)EmWo@XeaX}ZEQX)(K+q)WzT=T zBT_WhAzo&o;j$SRvl{6?8o##mgQEa}%7i*p%N~wf2tIOYxbe|xw&*OTd6dDBu`e{3 zB{6O)9IFD1k&8oiUW=YV3_J~hmkv_ z%QC>hZa|VJgjIkAejvUAq_<-k#xa*yn)AU~Gn#JrK zzmHPog$J*WlS^A~Ml=rQeEr64Ms2O4skWf>PMkMm;x>Tsg;U%uAhH}tsouu}a{={^ z$ZpYC$Aejd6yjq&L^)uuigl!c&Yf;~Fb_UhfTT@H5xJoe3x(aZlhP{h-!*D9DKXCS z{{}hdxMSqOJY2g=H~q3t%_Bu;p=*M$^d4)R-?BTBC@g3y(e)O>|iKZGrq#PaTgEC+S(0$wH5bjXx}$MQfcBFm|B> za#^}<%k43y|6&4Hrw~7K_XtN!;qTyE&fDZ#SZnC=nM+Wt{P8q5tvS%J^y&b;3=aRc zjb(hTJ8!;M-R*8etSzmfFAf3K7KMtRapjJXNal5T7_Pa?Q(Mulal zusu3U1=s7(R`{Sd0!C6?j<6=VpYGQcpNSrh$Rn+)vq_$b^_?|mYPEEpL!s>qVufFW zIV^lX>7B+@rQ4d=qhshg@BD&;0lQh1gDqM}(bk&g6Iu704g;$Rg~?p%DanM8L#m6l z6oKhD^xcP?;{+a31Gmdf2FKm*BP)EwoZ&}~Q$t;joKlqLI|&QNo9u1Z;t{8Rr)d9b zb^QCY7TPx%D15=>&+oo)6M%T#9`utzXD9qU7mOCO7*?ULP!8y2pKfJot5~6GVezAmu1ENq=zS^x|&Nw%56V(#YC&kBbR3%59yYt_?3{TY+@6{~$p&yPIp+w!9aE z$jr`$ptQsb1|9fqL0=cf_ktei<*2!m53;F_x5A3QrixK(&!--A@cI~U9ct%(&TZS7 zT{5cum%YTlk}7gS4?Vz}@X{M7Lvv}?TYn9oKFvckaQgz=CYh@vtv?t$43Cz|?Rr~h zAO201x26-AdGG(#=_gO}$PlB?{KYY8GXgmKZduj<&O5cako*m0QJx?ZA4N*^&!FO?R#E_&qX|5nRk}=_gFX1cSg#U$H@nrcG9m?n#g;n zk0-hdh1Xb^vN))GCJKUQ^50R&pPB68Q0#VQ^tzokAZ{jse7v*2Q7BjF$0cHkhy--p zM0Pn^h5z$T3WxDV;*3QSS%-f51wWL)Dam2y)G^MLqhYsy8=RT^N7)em27!>Wf z(F}}eX-ms6t@Gt4p4J6$1InLg$a#MnecBnNgqZ`gNti*GR5v|P^u9WLlC(Sgoy`=O z#u64XBqc@EkXy4cx|L+#;z;tBCiJb%2asLhyr*qoh1asP?iQKej87EY8P;(5)!1}p 
zjB3%S{HMqgkjg8AGxFk-yRWXbVwZd5JIAUM#S%4TGEO^RWW4(|_pJlve<8o;%Emt( z4OHt{Q>)@RZh;dcDb&{x>{n)m%bEV%2Hp(7g5;yL+N;;^-oUV#|E3jBbds$5*><<= z$|)iPcyk3F%~SsShrN?|r>6JMMl2fjdMY|ek3X=yRf{W7hJ2g97%%^DzB%~e@~EIt zr_mr(*1lI>YzU~6^?5+Uh1iYc4AXOQg2|{mqSPLXK3T1_|MRrl$=aT9&_S`L)Dh_M z>=qug=(dMvRW6^cal4sIz5lHmA}PoG1k8nU1goX#byqUPpru=lnB_Pq)Jm8c0cD;~ zL56kafLrRQo!P>-l;73CgYl~!+`D(1Vb&XR6F~@ZbI1Ile>msn&xFxN^^yxSht$67 zdeeDpGTlw#m>Bc~k<++CYyCly61C43{O>^j5B}|;>eDuDGP~Knx~~rb+X!Domxy9g zq;874wqNTi3}O$<&uoB5KgR(Tw83Xq7m!d{i!coksB?UCzsS#k66^R4=r|+wBugq1Ym~Vkg!w_vWe*RLz&Lhz>L3sV=-+#5BBLL= z?19d1ql)=|6BGQC&DLGQpNv}6f>}55{^=|&cEI$|evZ!SuHQ0` z>Z$@@HbP#k0w&vGUh0NEyF7MZ?y!fK{KuY?fb{|w~exjizsOZv)q?BQBh z9gG>j&QTbJ-~1d`jp_*@w=YpUovQqHv{;`FB(r))i#OI*PaN~EdC_l%Vkq(+p04iR zD7Uy|kD3__#q)t39`w`pdxgHbzJC645bzg0>o*hRB7>s~`#b3kjIP8aDWETv4D)=O$2_-R6|6Wg7r(7ltG zZ?s6eXR`fuaX`43pF9}*Kvuaubbix>md09+@O{PxUXGWa;HgMWzbh>p0Ei#91^&M` zG9VfFmjZ4cpFn^AJQ1(n+z{c}T4f`CQdYWa8l0Ww`=k5PkznEimZ z8(Q_-H%tfpks@P_?~j9*wwQw>@xt)IhRl9^SrmL1r?uU;zt!1C%(*&9(Gwi(jsFFnOyAD-fXIB) zE`_sid~@M}I*zFG@AovCEy;k?(_){|Tl3SKW7kUlKlj2TOZX?zcj5B}tEqn$a4ovW zp%B?7z~Do0>cL~m2|sR(iGfm<86?_tov|5GNb8Ma>vj|-#-=a=iW5LJ=Eg-Wj|Kn) z^ULH)e6&8zC|YcgEO;|YTu^dv3;{ZbOa=#^*5egECJ6)JZoLOz@w4Mr6M!3D4Pb>t zs1=I*S&Zc*5pig6br%3*z$jRxr`T$f*};~Fr~ZUVLkgi#Q<)l5Bcq9|UwXs9SlasK zvtY7s(Gf|1y=S<>8KMyrOqz?&FC8;ol`xQ=D6NR}z24|t0SJ0i4;E@QM^d>=x_Umy zWQ8v-RYmCag>$E~Iimg90Rw^8Ln=bQ@t?0xjz$hOIYW#pBkI8leQJ7Ovq`SGCFkof zW3=P+xs=KC*4REP=!*LTaIlv8c{dUD0AxH$OQ9}g_%o>oWl1#;2EkC4iH zR$E`VAjfl@fxObzI2_F#$}rIV&tC3k02*$*`?PBa)Y^#D_2>+TlF7VYp~kqm@`S&- z+)vyz|KJR;(Bm}R)E_x5#}=ENLizyW@n#Sy*+wU~g{Uk=&kYbus7ImRUf3ir0HCdq z97{JYjU#h-qsN-zKNLPR8mQzBJemsAxW5$&PXVprR2x3y{wISqn$%ytgw*u7Z?~mV zH3Pb>H~fhP=b5Q62E#!;j7kA&DxgB8Urm6?hpgjCHsij_W4EPpty^Y&vEt+1@z18S zy(FSFWiY5d26tL15c^|MVgJ5G6PZc0y^7eC$SAw`Y0-~-49~P_uv23gZ}|DFZ`k#3 z5IbQKzaz!keVKO(V1WpB+vZPTcaJC^cK^5Tk%SQ~9Zp534v`T0&yn^5p^JB=OL@7u zS?7Dtv3fh&=$O&(4QavU-ARg6BgfrIp|H*0G2SWo`-d5A`-R3a17}B#7T-n)=R=Hf 
z=mhXn^;MJz7}p5!xJ}dNVL**et(P9@L`81R4Fj^pK^wzKy+!APUCSYCQVg!QrukAW zMfIaC6^i1;Md>;-uD@Hu1O)V$?1*`+WGKbr$(P;OXXYNSqh87s8|$ZGQr-hwBCGf% z)w+@%jSrPtyQh7;`qO8}f{UB7*7678We=Hp8CamL>{XxkM_?H)1+-Eq-Z)NYDIFS+x$@Bi@vaMv`sv7I2UaJ`e* zY_Dmr=&eT#EBnRI{cPNnz=_UzIB&=?2$07%j=M!s%3sTJ5Ff2@r%ZvQiUCX$A4y&N z3KqNR4sU<#$HMaBC!^_#dI3R==$Fu~3?t~Cs=1-4yGwsUZeCAx0J*ERGtc<JX>+vKYt6|)_K?t(MwZE$YM(?2R#sIilV zab5ZIgC;o_s?8`mg@TzzEVf6~8{+?X|9z7Jq6CaLJe>a=@+2N}2;RBh=oCu@ovpex z?hHb)^V0xZMZt2J5agf)fTK)l+4G80 zPY5UNHbtU=8v01$v@L5dlgSLNFAiQ#Vu^C&NcnQHR^>oLK1%QL+^Ra^AR)Ro{CCdx z=|+10em>+39`R3wscl}mkiVNHFO)Hd{;g&Gk8$^xhfQZ{Ufc7Kfh6R2zvGYeLk=qq z7n@5wvCfCwhDS7!;-dTMKk#jM>J7X({HR}wxx9djbP!qRtl4b~T6eo2*{;(vcLhWr zzl-PI2Z`Tv+8xxPF9KkT`U(vM0}qu@!g0QbW*}`6U}Wxpu~woSMOgRN%gTt~6G{_V z!`lB4d*Sol1or;FB?-K0@ZMLF7FKw8VgIQzXMoom31`y6P!U+j&e6f{i&W|1RvhuS zFtA{dTf@7pN5gtxUqFf&&xmP0_MYQ*=XRo1?0hDOGnUj{1q>xZYLDNixpFU(rt+;6 z$kb=}jrr`xgJ~&IKNCicM7~*@$exR0Oy+b4t12?ac8Z4nDK#R91FtcHE9@N4-aZ<~YVvzoRF}4nWj1bW^o|=g`Y?>F$lT9N$k9K>bD; zA86qrtr^wy$F_;|Swap^LtBS8NW-Dmh|W!O1e;cTAMco`5&u9fU*NqVoAAk&a##}1 zg%r~*L2u@~#`=r?Vinv??aWfSu3!DGXwHzSvIQmb^T_{vDkU>2IZgt0bH+yV#XOIToAg?5AD;w$r_Hj4*DZ5^ zy-(A>0OD4%KivHxTQgzJVFxsYxolkqgMbSbc;ip09fRwo#H!duoImtGfmq!V0?P*i z!U(u`Du!5?D3hNfv~8*aL)a7^NKy|$*m%+2FU@E7TKmKVPiM>)mh7m$&DR``>*mYW z3wT}-2TIro?^P$F8>mtl1wl^DzsO8Yr~Zj zgHOYsS0fDgiWP=Z^n5y<rE0U1?T~jWPcAe5K*$bwHit1gx$LpgoCtuJdkO zzL6>ao~p%?@c#FEjVAxkq5OWy8U|>YfM~&VCIAo=EgXD4e>;73cqhqypVz7ku9J3s zzS;?68lzvHu)#FIt{0_Txm~aDq|+BSQTn9i*aWzO-=(0c@0W%DJ4jed0wyPV*sZRF z{sx&Z(ejj;D#bK@y#Gm-;`d#$%Azrr(TuMjS5 zbF$JhaT8EteYQdESP8ikbOvCt3_W#b3lhpTraodRc9joThYLMZ4sLPB zXXiVqBmlMlxM#9Z{$!Kqpl>Dzaxix154H%!x#%UA=l!uFU<-vcRRLb*{`oHNwU#U)PbqOX101p>>hKFa!4DQ(| z2YX?EY{2sxY*kp(a0cJQ3X;V!Z^S{>XY^u16re3$W+*Zgi|=Ef&9AsYy&MAI;g68c zS(#s79Pj964|7uw$V;zWq$hI3*XN#g$WZ$g$y&po?OU(96{MP!(D|?>97^kzr)gsJ z^DyR#j!u3qnEwIO@Oq+s%YpKHwLKb-z~o{l^7pN(MfIq}DHW*Np?=a74xW=zD<*fn0L2kzg7QGuAH$Z0ogyKQ(uAuIGY7v&jJS zyFVS9x^9FEV-4Ow?16B-QR~TkPz5_vidw@D;U39&EwNRk>HfBJ(++yfkd_heuYDiF 
zSWx5@|K)>o3f zJzK}^T6HGf`PD2|OPi|MY?T=<8OjIh%q(6HD4ATl;!ki9z0t$4NI3oA=DT3K(d-ej zQqYWujJM61PeAIQFq#+24Oq%sICKhsP2@bfgtQJO_|Lkt)M9} zc})nSeLiG6XyhB&`BP8`YVnPhq^aB=n&&TU;QemDGX)Ci|jn0k6udltv1s zvkbs|Pac1&5U7nZ47);U;@e3=y~O+kU~+-9SyLp2mPT7!`(v<1eT6^l$uvOVus zDF=;WjG>B5Wfe79w!zkewb0)A)>x(fFbymscKH3xnJ^H*QrPZIMbfEr1Ox^$=_IbU z-b6%1RM;=O%~v{y5%O4dfY_Myv5h89=<=1w-6~uX-`e>6NdZ@4$;0(vtG->j$8Qa* zzAKzH3kQU(eK$xEzArE$s575wug0?ll#bThO%(Krmsur+Jgn~A2Lu5P7!;d>F#tO6 zxZ;Vr~hA~2Z^w!W4p7y5W? zhOI+tRsto06kMbqZ4xJCSl6}tlQ=lo5?d9wLnom53})iiVuxo(sVxmai}C^ z>Eh^*>LBR=Lj+ez<$3Z8C%Gwkyp#DjqEU{cj}A!K)CWt048MBc7Cb5zYtWX#}W=u1uBcj3BnxmF3 zmzan>CbNQ?{8g@N8`SF!hNG#hZ`7A!&9+9OWw=dM=dK?r?~24UoWI8)A)I?Li zOk^p#^L|to*i(34&@?5p6g!(jCH`lV2NX@hBD|e`lMvnB!ASi=P@djEJRfY*F!^)e zBWe#?oIs!^rSYn7QWAtwvFmse?14qE^3ADb@vFV-0$uN;MaF^jJA8rA2!8GW#n&MO zGcjf&wB6BE)ZLX*QlaLR#_hAjlOcDYW2Tdyv*;iSLU^x0(r6Z6ir+ zfEDbKa_%}7o795e-cz+B97r*aRnkA4Q&0Xe z>Pz-FrvyCz-@=pQ0h%Ib%15^8CQ3+kz0i@l%;@6G2cyZX2)H8?>h5Tvxs%c!-0t`_ z2LW-U3PiI8?zmcPkN6xjwMKldO&qY*i9rU18Ho@+L8ww0w>!beWGVSxdOokuQ=GoX)2Ql56I`T#2-G|A6d$ z-M~3uCD2$@qX;6iMsE)$iNe08C$E-caJo-(DSXS}B9|kUqW>{6r4iE4zWk&YP0Agi z`ocL3T8)h%RDbX(W>~cr>9#H%gA6Ff$W5vuiY{@ml2@vrPBCnnJ$hzWs5@%&1$&Rl z@rsAhlwvyHAPe+DNQs*!WlIXI zo=#ddVRU+bkqL#=SkfiJA^u)m5UVigKNug~Wj53=4%GKHs%ey`tg(#oL3w95O@jq9 zJGxBm1Q1VRHQnz9cQPf4_h??JqJOr3^bCI5M8(QIYM3pbZY_*mKCSX}ExvEg-5OG* zZIr7i2g<$^CE_+-q-!n3%C*ws!p*ewG-0C77$|C$SRWJ%o$vEVu;qaug~A=YXsx)E zI8gZ^thaic;K~gMcqjn=leDr}ZC^XTP%w93{&tpIGpFuwMy=7g-F%H}u9LzQ;%~lJ z!}pHo@Ht?EfsN12--8>{N&wv?NYRNFlk(BXV2M__DKFx^z;P$Wtlnlp^;v0RL6DNa z;`Bp2mn!x;cb;mauc9P>u*vBJmY{5yU7h4e-ktSk>wfUv{YL-fM2%+%zXyP~w0_S=-w{qZB3G7anmu$6V{)v`;(s1F6k-Z;sbrr46Acz@(-F z{FRG=g$eI^4%=HbtcAny-pfbF4&trL66xZKS;!mw`KM*Rb$nAP!jZH_lF!k+8WjT4 z`I(XZ*LOsWaM=pso5#!31XGnYigtOBH#yFRgNe$PPpkFi21l0=yqSmVJG#?a;s@mQ zFwwU}rkRbGN;>$PVLXydslNa-p>Oi6-Am?=Wo5+sHD(XTq7h$UL+t&VHOlU)YDdlf z=6Aqk&xs#zBv1#YHQK-MXKu^@8>+L(5*afa(lc-)GF+aI`yJc(YXd=oY(m?s{&^dA 
z*__M}*E>|)$*)vTtg@+l_hqN!g$vvn+#_+tO2zCU6pvvobY6Z@tx&%?F404;&9AI@ z;eVbmDkIpXaMr|enkby^%*LS*@;LOpm;E`_dmu-AVS_`*1&TL2D2O}+_= z#Y;k~Wi$P)lS+nYm0DIvGj_Kus))Zojh=$QeHE4n4Bv~> zSOF`^9PBel;P zoVLeGfVm_JWNdc%@q1TJie{LySOMfV*YQGHcX*ZC zqG32`wwDK;N+da7<|^<*ee8FBP=rDpR*m>i#_n(m?+`jXr-QEnnWkt7jl~ze-_x%c zub^YbM<4G=R=A-?o|ty&*WG9~*1BulIt|WS>I)^r@vd(aiCp=myyny3{MneN?!FRK zAg&QI=Xt+#1v|^r32HD^Ab2hon~j%AedCIF>bVxCnXB`gzV@5ehElp?5srs36+1bsdkyGB|so z-tW|GvpujY&wJ;5iilSXqgaS{j2YsUGka z)lwHmW@-OzGqghQ@g25nIdiR^yNmV#T?HVF0;T6$FNzxNw$4``2d(%sqe{O4n7Q`T z?g}~)@e({uSQz)m*QC_#xn4p}E&}>e#A4TIwQj-d>jH6rV>?aFFUJF(A3J%0lWf0r z3H?u>?TpF#noE!)U^=3Bd{ONh9by3-df3n5zQ9?F6^2gTUVvu)sH_`^tOZSv>;P#i zD;`z1;jgFj!u2Ib{SCxV+QA4JWXQcF2>iP;-C{>*b~)l{h6iDTJV3I>*1O8a7R)G_ zn#^sFUSNW?!)2?2&<4nit$avUvKrB$j|O7i<6Ydu5v}2UBKck`zu4A$Zs9Hl8F)GR z(whSJNZgWN^LN=(p~Yh^J6CMlYy4H9RCiIB*Y(L{%yRMRq;5)~Nr^NQ71a6La(U0W zAPYmNy4^Xab&o&D@Dq>2)UT2e1q8;CN5cK};=y+Q7PhaXmR(A#u*U2lRG=vP_Je4! z?uwyiztBSq16UOox{0e8LcG^DNGH`WW70HUKQlNa_E7o zFz5MIG)t~OqsPhgN$d6<$hV40a&Rjo+i1OUC||u-GC7*Gvy~Z?K=SjR(J@D4U~H&b z*#(Us-eK?5FUvaHn#dNA`HB${Q-|TE_{cJ!amrZ>5QOJ8t?P_*xHZGUAtiH?`f-zZPX{LRB z;@+Za$gel1^SVOqi<0MXN~@B1b@PIf{T&>n%?q;p0cC%di3@tQnN<#f6&KFi+#z~Z zdBP3|mSvl~+9uDl4Y`_1!mUngd2$I7t_7bM_!_t;`#)9nB8}pT9!x6ZrR^=YN5r5% zxG^3O$nN~~pWNIH1pp1248b>Xnz?<1w&p5Rj+)|}GqSWHBC+mLaBCX5LPvlv-T|AU zH0r3=^dMrYF!3dU&IL4fnXxg4^|r)eNJIQylL7@Ro)!O(`Scu(HMdzJF@tsPl9e#T zLlt)RoXUNpK<5}ZsxHohPK(qt20&Huk?xqQ>SZa(MD=&_TA|vSYFnkA#wdTOP z7f_QNoB8s&cD8aM<81`k3zs$Q8KwM+w!?$eR-y|PSWh&iy;ePxpf!LV@SQS!%vMLG zL}8NcveL(7PTG-v|2j~5VErm+bs7EmFcPQZw=)51-H6VLIUfn@ikz1K{ap0`0uA5Ospe$y#!myd6Srn#TE)_UmYKQl^FjyNgXu6=Cpt%KA+hGVZfo|1!~N~EJ$r$7Ubbg^vf=5U z3;StSgdLIel?vBrnVb6p_7-VfE8|CO-Jk8y*r)A#)a73n&{><=q)aSlQ$(5yj;**> z@-H2F;f5#=XUR&Axg0jRvIM4FXzYlNlCJia^e^FioX-O!P!k?hEAtY4_%s}^`h|cd z7>j163Pt^3hvO3p-{fq`lk{X?b#-fLOs_MhJA=bkfY8*N(++EkAWE4p-6}|380(E) zhoChtpXO!nok{Lj|LraMGllmeE~S|z!i-EKtOj{^a$mmJI`NhvRQiRKN44$Huk1m^ zgkCaBhxowLO@l*u!O<4zjCF24fzGM{EqsvBiF61sSt6z%{~Fff-JvbItm}d5mE;eA 
z65i%~PV2d2Il6=bMAW13zAw%dq}$%hJ@Kk_!Ra#4`rxCEynHzWIpR!j3OsXQL6Y94 zz=Gi^SAJhAuQpO0fY@<7OahNZYbh&bRTxKcKD_Z7O>N*kzY377|P zv@1V~jHWhk(TSzi+{N!i{bHM5W^TBlR zVNS@tBfLyWj2vkWO^51~IpEOD8YDuk4XYEhgoxc|(Sr>t0X2{qztRy`uRzOuOU^jv zOC78bwC{|%jWWJKR66imsNn~DX**pEA$hbfK*c_<$Aem>uS4%dN1(qi2fC~A)9pj4 zr+85NA858T(_@vEDa^Mqjj;o=C2Ot9{*-|KUoKVzWA3)3B8oZBv8H6ynAfD&gy$K- zAAM?YJvIxKIL!<-e;W=vF^pEEwgAYtE{(s%#qYt6zJV#_F32f0U#jU$e&rWhx){%m zJxS8WSJ}qW!{z)G^y}8vX4wPUPaU>uvEDTd(|~PDADFyd%iu!hKDeuwE07Yzr#a4f z=Y!OM_*x@T_)T|vor2Y8bpOnmI_Z`wpZvyK@E1EilSo*x_H%=oc*7ZvKHB`lg7))hz19=7<3Y|#R-qVa7;Y5l7QAqw)Jbt*Yx zR*Dg9HCHb4X=BG27P?~a*a+jfwdeoGOAOf804F|3OpR=AGhDVr3{&>Hmo%}VW@@#| zj{Bm!ziaLO)#KCa`dGM3wDJsWI+YTgl47PcE$RY4G8kK@roB48|^@s~RLRA5!kV*Ce$}|R4FVp7N%9G>G z3Q2t$!fs@f>nrlEBESx}Xl z7Hn?5E}--BE$ZuIbeNV}0+5$W`AOMI69+od2ei&hf1{%QW;M)VB-h(p2}|eWRn*t# zICK%y#vC>3Sx6f$P+UF3>r%DZ>-W26?y=QVRfc-6mH-Hl748~G=FUua4XFq8zlO&`SlB$Dp12)-(M6@gUb?=!qjE!$M`nR9WDNLn z`)OZ>r`LZND~Z(kSB+@gmwmkKJ7H7N;lVVk`DJIo0u){-Z4T?dRrCdvGBLw z3TV2usKUu%s=mM;GeD*B^QvT4G-wZG*T(D5pjuAVN(>8(xtKWxV#dN}9S4!}+LfVz z0TDilN zUvAEVQS;vu5YN7T`pEZ6_8yHN1#*}(yb%2KYL~}=WRgmU<2N&AFI!936HU;|1L3Oy}3()aNu=^{n6(})Yd*W9@1FH}_l zoosNJy%?E5FoT6>atU$4X3Zhza86L+$_;(%N!7j*TI~RFkQ8HDl{SZADoGE+rVNCudr}=DrT#(l4pK|RJ%LIHG9~oAYaMki z4U|`!K1>1Gs+9V=1ZMq>|Bxm50q#7DM}(s z3}2NAr31QSPkD$I235)>k=-lA3OAk?urQFOPFT>ZsRJ1C+I)tRYc-g*$lYgz`FNsC# zWf>60+W5;90PQi|l+ODq`nkH>v8q?A?{66}f?_a&eD+ivIa`5{e<>Avv`oET2Tu_g zLY>L`r0|Q$A^D@cEQ{f=^3Dzv@tOT}zBc^a96~$lXcCWsYv@xGy5aEQHHC2j!*4&* zsFG>f>Eg#?6@yKzK4**4M2+I%q_Sq&Zj?#)x;nd$SN1XHqb7EEOD&u2mb1=Ql3W*`yFfy#W^_jmN&cU)gUNZ`7lgUP1%(Q8|6i#IGX(9#UfO4%S<>w?wk>Dto0^bU*wY{gWJi zqK-#o_Qc!XD;s4osTvd9Tq;>5<#?gSjQYBL)OAb#uJ<+(Ik0fLWN;f23tLsQX+! 
zlgK!g#aHoG^hlo!rSW@0*OSq7#kzw~0<9&%)s-36Kr zNs~F{sK=TPKb_8bkzOHPtcOtuq&)_Q+5YFikGUq4unuBw3n+!CUei(MkdEqN=m8taU)Xsu`8^guu1^F>+}og|QQw=N_C+pI(jbED_n=yBxumnLAxFvUfTI>cHd)TY&?2 z{FmJ3uMESLS{^PQKlF)Y%fH}Z4bnO#&KmN!#0sKhd~MpYXJ@G(SIAcbqcM+d|LPRd z2MQMWe0ArA2TAePjRs4CWc<12Wq|{BaZ0q&vja|{R?WFTRQ>1u_7SAtK+rIZNmd-& zp>8(A19z+J9w=Ma_IB+al-Zi=P(Q__dAlCeJenIkT(OaGB5*$U}Fu&MIf|FunC}d#Z9gW1MssfAgy{Oz1`nS z6e-Q#Bv)g;OmSU5&yQLq!GZw8&?2SzVVI-@5^WAAx~S^af~)UB*g4oWJPE`2;#B)B zVq>GQ>-x=4Kgy$4Jeo{WH@m?Y=Ef?Mb=3@{yK;Y?OWwxV9#=SYUc+xcreukVS|}&W zh@C%}^=>vR0yslH84Ul8y41=oIwm&-?p-;T&t1Ho)hFyoR5_Mpj2=4J37&* zd9;^!vc~}1nPV$ro1670#G<+Ouu&fUXUmJkV?!8)5AclG0Ei3yHly5I)2fL5PzU7pb=!kogjZPC4NgQH0Je1%F)HJ*B7ZEq!?;ou z;f^k8@}*OCBSsqH-n-UV|H~=y@(#G@)FWN97sQhA!4Ket(r-0I?3K|f``4AJjJi%Y zC(X0LMJ_YzRVx7%#_}KffU<>I&-coiPL8fO442bEqi#*Lhn844bS2mWCfU8%!OkMJ z@=Z$GjF&pRvpca9L5WF>Gs*0gG;o`L;u79gfJE*ZwwU1>_Ks*N@9XOwkCD}Zc39M;~)7^ z6?@jiGhP@VxW+LXjua?y&7+(S-L4XTYdy6UdtVyE?WeUU+wjpL^#I(g2@DIHmiZcr zkcI1Jw_Px=!P*GMt8&B$5Wp3 zJLmoL{k--2LqCIuJ#pXn^S!@uUEizKKgz{8sCuWCwYxx=<5It{dG*M1vV%I2;(*;% z;cK=k9$Uw!df~T9*UWTio_!=FWL>}e(oJQz-CpfM#F8YI@W*J@#gCQvsaTDE=Y$;k z9Zxjh0j)t041W6{`M$q`Qp^2LUH3k*v|XVitNpj^?axdufF~SF)Q(|Ddy?`u!}M3m z=R0Mf$*yWT^UJS)!h?RE;Mbh6GW!ptQ}{ydjzqy_+q@^Z?pUd0txu# zd1|1+*Xv1XrwjkH8+jf5gLC8aRFeOH_AuWR0jAzW`C0apKQVZJy^D9?tK0y5^1a!m zv%hvxK5ifN3w-dn_yl`EPzU@ykNRUD>T`;fUAp`R^50d?J$fgYYH%Xs(-Zy0Hv<^1 z8#lUQ1g~Dj?r3l^?9L<=Bw$;n8@W#iR*+=rD+JJJOpP3F8tZAz046&@`IrAr(Vrl< z8DC>BKmFI={{BZnw=Z%9GG)@gY`OFIZ~c1w|F5qGeOfIKD|Wp|%01N7NIDNF^JYc! 
zKjvRDk<@=fMt+Jb@bCY<;|Ip|N|BtzvN~L;LRB!*AG<=oX7l;lfBfB#AJfH7R61pr z@YZi#{gqt#5DRIi(0{N%#<{TSe!+5O3v_5{Uwfp?J7Di7D?hCLl zeTN2sy87X5A_M~+dD5sU`hh~^jHL-%{=ia)|?aPfSk zJTv>@W`JH1E$ICL@L3;!))C~KV*vPaHSKb9<<6vhBm0!$)1YLNnbyZzl{z%GPT#Gx zK3lE@LJQ9QZTbJYkR?;dZJ(VHrAs{$cTIMG(;WQ}fWBQ%3D^pb7|z5QTeVfa(E8wO z=v^?36~8^HJ-*r89MIpmAyj>nS2KBY5Iagab!<@HLz%wbm=+9I%}|opd8Btu5 znc>OY@{g4pSnJKK>}o1WGAVAQOSUao_$b274KH;NoKoP^Z}C)6bMME}vO29(wHI0A zo<#^bKRt9n!gn~{2@6#TvU0hq07Oa$Ik`A((FL{sE%U9<7n5)g{g2ic9@0FA_;9pg zZ9MuKQvChux3@VZrx<1V-YKL^iGzNbX?>5E+m29(+ofz59n&vTayD+y7R6H*$j7f% zpyxL}vv`?F;0=o#Pr3v(4Y@Yg@I);S3Ja+^IOL*l)-62X3GJXszIfB5(Hcc8k(_R5 z>Q+kOJo(SrKE)d(`aWppq>KmuebLq4v;|Y1;l=Y9RZx$A{AQ>xT-r%|&{C%aiXZ87 z4aWzJrOL|eUzFI$?d?^cs!fp=96v2*oI{6FY%mXxiJZkidWwNN6 zsI~%X`xd$^QvI+44yw!ii5oLQ=dt$%Si$o?gr!V2%afg{iLu!!ed5m>Z;cYbrWlQn*;O{4*d0dbkU2fCXf5A*QBV~ zrG39q)=%JvN~%?iOnQ}z4>Du%@a^VMRFwsKeAB1q$p5oRfGOXsQ;Pl)`DeTe=bT5` zgb+SkNPH#K3$PO=0l`(hi_?Zp~_c5 zKxRm7`J)LLNN2Ub;JmHd43&OWj-gr)<0omspoQILca$MHf^< zshlg_D!*b{Oxr9f?MAzW{Y3%?T}f^`8GT;>WY0)ov~{WJW0Xg2rr4`Yyl|9OKCJKd z6EWP`Zs)e*xdEpbzLS^dY8Jm|;K$Me$m~Z`LYLN9$Ls^3V{1*Kr}{FjK*pK@4uo@( zr}SFXR^Lfz!pf4-se=v|)wnc6t$T=@s22;E>@fbW!|J4hyO@$6S`a z7nigMN`h)rSiO!>x!6*H>B}FU6U^%S0#MWNnQo=HC-*2c6f{#BE|PGFCL zp0-2F*+KI}amYFu$H7v^hp&+>VQHBNh{iUe5=AJGD*TGu-fc43x=ewaXR%4>PW6;6 zxfYTZEPrkGlG(Zc@E*WqL9SPR9n7Kz`tTL{DUNre7)R1E#q#H~$(*@1w^bSpx`pf9 zOp1F1;X6y73H-VuyCq)1D$9yj_!l=4?O!4WwMpC=Haq>Q>Jt5amnAruVY`jYQ7+{U zqm}A{QS&tO)*EWuV>R6@QMdq^g@3VDdrUv9RTPEC=A7_O!IMFVMh5%fr1h5UGU9dj zU|d)Kz(5p?v5s8Ds7dC20{qb_+R=lvNGa5o%H0J+h+a164Zbqadk^&*& zcvxK_wAE|b?=8}CSmMTsoSHg@i|cxNAh*QPmrB*?TK!nWRE7_g+SlBj=9Ah?p@)hjZj zeD;ibz8mMjt|#yQnRhFT1_e&phlNjGoA%Ps%=hL^FS?kEUCO#g-5c@tt$P`-yW%hu zKqwpgeft!r6l^tGjVzFB}K;8I0K-~AN)Ph**m1S`3X>*Td8^hJ~OP%#ASwkSqxm%xT(_G|s-!zwCB z#BEJvYS+~$zpag8g6RPDa*b-0MV6GzXFWm3g?VAkx0m?b7kU_y&ZHVfeZxGHZjeSn zjl@8c2}_ZNJWd(?0Hd7EV@7ky?sMH$URpH>CD6X^m8(IK*~+{Z+Cg;CVz_iAdM`jw zoNB>=4CS?U^#@CQZ($`y1TuQ33`!U84f&QEaF4?f>w!Il6}7mjm7ci!vZVxyiVba! 
zj+|_WXd#et&*RLr=>>*x-A&uBZ0h4*cvJkY(J~!b85?3?X<@8u%BPK|oc?8T5y{Un zg0&+_cpvQI5L!Q$bcDh+B3Ft1Co-f*J%WKoJ6Fc!InZjsP$SWe`Gpn#qL#O8W|zBi z5uT%+ip4U-uA1Y*2K$aynPa1pTNl%TN{Dz)i=OffJ?;)$M;V6ZQh4bs&@S2hBQzE> zb=B7K`J3bEvy+9MEBBbo;6yr!o|aq=w}o6WD% z?<@%JZG6{MO9h%HGpTgicRtZ3jqbF>YzalF;s$JD_+p~0dBFX7t`9KnL_t;+Owjy{ z2*Tkm)pqod*kq;aet#28WbTK{w!b-lReBi_&}qK}8!-}foC+t*yfm)n+}lG&n9&?wav{iL_C>1ACPF4ZlMa~pnI z8FBe`@DT)XeotS##Rb&%%j&*^IC2i$N6I)d`Le9jZJQwEuU`n=xOBU~nlHVJK8Z|` z@EK_2@GyhPmg=8FtBeGHaZ5|*h?}vu_2>uO_sltY7t40R3>9llJvPeOR z9c|R5u5A$tdqx2H*+M@8IVkL|bz+H!C;$XGd;ur}D+$}3dmM==|6+Q%oR&|XN=<^h zR(uQdoT2Qf-t&KxG+y-Vg{s=S+)n_NSeZ%46b8Y$3FZOSm^)OlX?1-6jP{&hC$D;( zDRym1Ty+R#B5|Xt_Y}Wy_!}?Z28D>*8MHg{<@{sn07qn2C6H{WBJ!+LGS0ZPY3gP- zR38@{a(0;2!$G{;6s?m@n_(D5b@OtTK&|cIFaNYUTnx|2P82f~v{~yWJ=KCxynWN` zCa!sR_}{K|pg` zC}utW=^31$yu%1&50K0y|90%ES|FLxFLF*W=6^fw|K3Z6EtdNK(F@=|9iIO;pZLE| z`OlUAf9vY~?_B;rJ(nN5){l~bE;2xhiii3vM@*WqunSN?mjZMYjjFlxIewT^E^KMjRVlryHMDfA9YUx!)9Dci35Hegd*_bZ7YejVH%KHZC$t5;XhJ zJI{o74!vEKKAznP11pp76C$YcpW&Dl8=h$ljP({;@Lr$s%7s3t`Of!PG@?nlg-HwZ zK@aF_p|`y#GjS}4i{y^76drU!USz&#<)4zAl8toL9t;oEm^+aMT512 zgObe9pTVe7crB1ywIT%B?11vl>V6!wA!ei_$cE%MLcY&H4|3tlBZk~?CXlbO^s;e+ z+31sz2htaB2W$;Hh_)}C18+Pjcm;k%jzm}o+Z?1V8Cd%4zPPvku(~Qc;kC#0{qjXI z1jMKa(*8`o!}4MOq|N}_H1!`-6j&?8*wJ1>P5)il%`Hq0a_lR0{lRwY?BXG0LuuAJ zHRi&oVlm5r*GB14X^bqZY>VA0O>)Mgn6KxQg91IK)#}FCfvk8nIqSPNXeE^CX0&RN zRp>!rVUw?Ix?)Mstp+ufBQwU-?vHh#BR^y^wYR!}#L!RwPPsDBzq;6&ZNpnT^emf8 z&l$tW%PPoIsY@eycyZ0w9>cAfb)eI~-7N7DIH2L8NED4tzsgavO?!2DpuXM6{8PGm zgd=4M^ytyupXUXSIndZ>QXe4t62}_fmpc@r-Sb_0VFT11ZuNlYHCb=hDcpYJw`GtH z31XiauB27_jA1%D%BrE(HgXHp$IbKsElL2D=|7DO5JMne$L_eDd7uz7KigSs_C-F> zW9*5^sv4&F_EWG?0LJ;~*E2%SchIv*;Uqky*)o}Ccg7Imw50@^WGitrbY3mtuW6sQ z(!jRO%P~^Rpu7$5O^1{Xhf>b%6UBbila;6&vcJ8oLCOrTn$zzx#W%UAIs4ORPE|)F zUj8QLdZ9*Dd!n)M=TiPaW7)*MgXr6nWgY4q*OghMu<3j-NtG~pH^2+)T+aFY@_`>eX)MbJGq&Za$qd_%qlGVmO)Dp3*kk$v{t^Gyjwm+(a7q4ZU5oGTIGTi)1Mw5m(GdVjjRoC(Voe#+5WrK z9UAtCrpDjLTPZBiV^S?|;_&Qr%cq|685}>aoPJ6&MIos@?V%Kp$pY{O7?qQ|nEv*J 
ziF7BrL;s#rd3qI0CZKkiGA`)fJmzF_%8oA^6;8T8N9gCL|992}_-+#+BHrtkQXc#~ zbxW|JV#7_s;a)(aJ*rwTZ@4)2EC(#ocW<5NQ-YZ|zgd+|_3=LZ_1m{GhkF|jQ{?G0 zrYcjMh5Orcd&l#M$KR z!jv97Pz6e9Q|mtN(_N}UF@W%Vb4HxWv-Yz28q!ovH)#f!BxD}2RicKFy+h{`ifuT@Y5OfnuP96bqkHOG(HtsK9yR*&d>F3nbIGzodeedL2z^!vZ43a9<-8z z`v>g#^>RO%I4NG)@iF{qmXwEqh2FKjF*$!tX{<%4I=XDsr(qQO{^~u;+FF00%gGB$ zPxG>6tB<7O!=oL7$ogPsk?X96_hKP$x@v!$3*Nn-N0LVXW>D<(`V6PA3cz7QG6|*F z-zN*QJU!xk09~(7a15ntF)pdPg~&_9e(lQ}=GyUGu+qGp8sKP4sKyV~86^>DCl+Za{rC2mCA|77-Bl}4+PqE4atX#OfC!%W zJ2zRL`+&5>O&~u}a&Sxc^L149eo@uhKAVWueEZCndQ$>#gsAP%H#uDl-}D2$oWLS?ZrW0SbK6jU&wAOf~*cW~=iM|voQzQ5wy0Or!g%1K zC72WifV{zK!2@LNASEMMMjIjH89vfAZtcC-s}FrNp|&*`0schJ>9_h}F4uJkx{-Xa zye4%xNy9j;4f+rmTjjl>9NTILsN9)~5F462hAHSb3sC!$)d$NlCj$hdmxjw)exYGh zW9~SyTwPrL2&1RYBKir1a4t45s`AZe33B7RZQG?|STe70)1V-5#Q#8`Xw=e?v^RTo z!!ZaqkZ)WTZsK7m;7NsJ%GCWfqF!R*iiK^onaO4`)tTo`Ei@B^*CejXUP9 zIo`+MM+zsQ9uo{%;iL-BkUKB_79r|92SCs)}M}*tt&1$$4`#UtDb(nkMUpo&ioMLK?AA+5^%`L z8RTnorg)Z^$;xzdUuTw^Ks!=V=+Nyk_e$O;eFG_cq+`jDgvsRNmSV~}v@xvWUj8S2^EXKP zmIQIf$O!peFcX3wBrxphATg;XXHjp-veJJ(=Ty4dlm2Bq(D;shSebOf&##gXz}jh5 zAFiunP>zJru3T>rM%KR10caKZi(2w#)Chb<&N?xb-2-Etwzbs%fNGQT2FSWK8ffsh zdpK^(m+_u6n0hOUN1YYns3wy6o^M9v+$;7V&Va**O#CL6Pvh1Dzu3tiFbNpVdyE6-*WT|~VMfj~ zNxEbz89oW}D0%-Plqn$$?>G&V+eJtzz9Uq%Rd1>7hm9R4zO`G1&_wH^%zEik1xzbP zu!333>(hKnBwO3QV&~^%L&rzd>F}Kp%caV#wkzB2;fUM!RrhG(R7n2xh7T}7oPX?f zkI50Bh7fys(uDnBO+XFHu#Zy_>1p!lft>d|RqU!xVIfW(`)FtNq|s zYhS*v6Og2IrPq-j{3Rdn1w+~afzBacP>q_$^1xFO=o{6T&$_|MU!@4ia`4u&!40e7 zzPv~=V?Cfh=W`RIWi0@8QVbK?=NxeC4v3j7^s%NSe6+|pIoyQ7$Lw=duD2V!FyW)G+@K6f113b?Q_^ZiKi!^IrEyl2O&H3PKSNG4UZwR+ctl`HO)Qgm%|Oinr}@XB^FZ35y($-?dgx9UjY>4R-Q zgU>qI4#5!VE8yN+x|TV6NhLr7T6kEvt)#<%^4{v#vL!FbhcRNsLHu@!iA|A?kT?qz z0}Se$i+oGscmtJZa~0fuZBj%snd=41U@kS;&e7+W%hi z_Ud?)GB(H^XW~Hqd{4b&$#EgQe=ysXes4XL23DV3Mi={gHj~Sb)~2ggK!R{1#g~=Y zv!k{j*pIEgy=tHc|9)BRt_4Rhm7E~jx`Erq29H^({(44LS7PAl;s%ftojGAx@Fd7) zwdrP(jOQ9nvedA9V)iiwRWs1zAIG*^fIs}YLr^VSUNfCZtlZz8A}RJpZqUME|x1X*<(mPkZu*Df1ZtaEzr 
zjt9Z8JL(?hz{ZSsH|==RXK^>Aa#;Ro(UlP#YOEgK>uQng+VE+CP*bya*Dks>QL0-! z{@MeLij92C)ZT1zlzPI%@jLsG^4DQv{fh#~{h=&P&YdHd7@E(S9%Q(ROfVWcBlM0C zRwMBR{?NlajVPF22NGbuAn0I@Rk#(utjY>bK)j@|89jK)a)R=UJ~;SEyZlBp4S#PL zv-j%Q6&C4DTZ9CO{($?XHPV);zaWd#6=R=ohVAR|Zl;QLi#1)^MJCcI#IykAItBPAbB`b(E_v%Y99_{Q9NCT;HrGp>7(w%^ehb>62C9>Jr z)D1A_4f>uHkS~QE@^!R9r=YWR zP;FYo*%Fdn&nem6(Vm;yN4GfE zZ0P|Jbl&P`mK^9imID=qs+4Ts&2eh^I`jgn#;kJZ6g$^uX^F+bS)yIO(4$qg1>)4{ zB%GHCMtw&~fR0VP7}XX*fE+IKSE9Pr%8at2#Ri`sy)(GE^)%p)EFj)T(+=HaM6rI8 zh1_Pxnj^d8G3kl?g)b_MzpPUk4;QN+IS3xz)l%I%J-xJm@VD8)nPGi~?U1ka8ij^6 zJ#|D+hzjJ8j4jOfe#Byl0zuV&K9zMKxd#A7DBY2eET0A6d>;fGQ}&D&fWMdBd~~?N z3Ng8z5*%63Rnd*_Dut>7W+bF1SJ!lTWvEHc2ZwZDUav4Qu&j*KK+5(eYoYt7=$BV4 zQbA#W)1}TTQQ6hk`6MNr@xkvwe^BFv4#P(Gb}!6)6Lx zbzzl``)&S`{)Y+jA+n%n<(e~w?aA1wvHOOGsrto z7_^nz=VRLpqo<52=;BZEp2`f8@{$V$~wY?!+bGrmKZr0_$Q=+!7vtSsvo zhD&+0mTGspVPv)U7Q5S*1|SWs!g)CUMC`%0lU;`q-DgAvgoTBVF$NESyoZ)9p!()R zGsO4!V>u=-8bK%7p~PjS*8d>OVYn1;c!s!zv>&3(4cMEGs~DDmhLe(3V6xP-HOH)R zYq?qT9syaDUzsH1Qt`JEi(q&v$b0?YL;Y9aAU0UJ9u@uk%4mf())QNk#lpN4h`|Ls z1;)24V}U~a?&x3^)W$e^ibjchrC`%L+FDa8Bb_CSzohwq(|?Yt#%*<*ZK%u>R9q~R zq#b@)ZazFGb9g@imRLr(Ogd4iw=~o>p2>#Kf5GbD0%V!?re`%k)+Q*I^=+9I6<`f5 zm6k3IJxu7!8+p z2fSj&Z8>I;V7|#}qGu^_PwZ61vwI_C9UwI(Bfz^##EU?OgqB_GU5}9O*`rYSy2h2}t*`{YVZ;8k0-s8x}shM=p*!D279T zSnoW3W$~5Rq%BX@gnK!8 znrNLN=5G7}ZGF$13i4U-n$nb&8X2wfe>pdj5mF@y3%hW0LdLhCnG_8!q!XqIs z2b-JhvwqIDFZZ?S?1Od6UB02o8C-Kyq*x9_bDg;*T5u9DY!{QH9$^l!>&-2tk8y$0 zKAZ70wU%2fYUZARsJhx&&h?`}LXj1p_d?#<_5jd%XOIl(PeuYFlO^$^&L&D?;us>| z-f8wlp@n+c*O*GX%I%ZKX;#D{?)9~AuS>&^I~*#Ug|Jg?Q*+D8mQVNAt(Gs-O#uE- z^Xxvr@VY8y-{SkkMDD z4~66cj(v|4Mz%hEDi=C8LO1*LDFD@9He9L7L1Wk*#(mu-=seUH@BWs44009fD6(>8 zRBE{%At&Fe;ZtVY%_MU?&nHyF#jR>)BV+@}1@L%nzatiwHsk}v{mwRU{@n+Uw0OW| zxJms1B`C>s3xzN8?!MoiJSAioSYM+fg1+JdD0Vm#M1;(@<2=#Du-;|}KEMitNYV~% z0}}-^PeW2PV!0fA(`HOEUQ_T zMy#$;V&s?N%+n{6JYQ2$WgQ=)@tF!k-u2w7`7y%-#22B>->fS49Q(JdamZokc{f80 zPEf+o)o!jeJbSqJ0R(^5JRPxIwTrh1B(KHO;jopjetw!bR1YPk!#@w8Q@VyMh 
z%TaW;qw$aB4b3&s8gWt(xq*clh6anl8lmO8yM1}aen?ylUs=14i@uv$N#)l3$tn~W z3L9V;x+DO!Wpzui?aeG<0~;566#_&jW8e$3))}_nfdQ!8in5;;ck;Pn@iGip3GQ^C z`XnrdamjMSux^^OqJf#{=h7KiVt5WoTbeDy=j}Y8v}Y0}Hp9lXrJ#@K${=rRB${jX z_s2g;jAhM){+cES&ymY`)cfr*?!R|_U}8`eKX)c=e;-I3@MNK#>f$Q7eSW&G!ZDyho@t zyLz%{lcn5udZ(3aJ4n%H_=CPmbv7BBnlg<~EpaA3ui?t?zucQ@%1UwfY^4v(26*9a5=^(#ksfM<_*@w=#09kjszaQ=6F=@ z^w?8h7o_-nbYq*tnc{wX;uZ#1<#NjgzPCQ@smEQR2cvjsVq;%9EPXG?t^t5y@=NkcmJ0#^biSmL+)?T621`|TwuTlq)9kS~Qlc^t^^}`o9ryH_aG zmM<1bi@+9dfPzNAKPXZa9~nJ&LNvlN?fzR?`)5>>Y`pFZ zW`fAqQ|A7M<8X*3MU?kUEQ;Xu=Z`aYe$h_0 zPjgll`$47m^E$4KFi>*Wda<-+ft$V6jC7;PcDYFbtw}9bHD1p)-K%e2*sw4F;frmc z8$+?VjYv8^%#SI1o6av`H}Wknlabx`nQY?6Ak_c}#N4hb_BN=;Hgf=wG@lxT+;4Vm zkD!r6d@UHi*7}WhL8<1c*UFJ6cLZ=d=HZ+Qg6(JE_cu#`zV}+Kmhz8&O-4;&-9<;ew21!3{a(Hwi+mSZ=JYWK6>A(#q9cBPLAtwW=7&4`P;tZct!)3OcLc< z+unop1&Mum32*mcqr9s zu_ju3dx^j+K^Rd4NEZ6@38{TKS-e*}#;-U=@`>(00^Mtf?HdsVr2aAQ*}OHX@npHEo6K>^4j2GB+suxpsbq z__U&SR^B%=PTztMI`Mo%wBD%SEH_pY9#qLEqghn*ABG6W?u9Lo{dnm7(+79A0MNFnT8z!MaR-*5W zenFk7CsR0w3=cg$t6k-6#Cu~HR=axt7A(^aqVOloGw|A@{1I+Ingo@Q9-3v@Iyb(4 z40k*)>*}Pn7js5bp##3Ma9z5u*W9|+OJFDlt(h|FF({**j(6H`NQdSuc4&8VxcB-znD<1;c$zrQsRcKy;ukI+uQL2mg#5DxeO&CB6Tt z^)}kbM-}j-s&Z&zg{oHXut>Xol094nV>E~1fakP6Z;ztJwy$5jr5u`L+!&p$z3A+7 z*sh(Wa*xrV8*x2LJv8Oi6=jau*H7@q! 
z_cus{WXrC}b9sye;gXPc<~Q~F)kiBK-(>nyG_CZ0)Z|Vv-7r7as`#X43xF@NdjZhm z5nt5@6fW%oLBtML`(e=6a=*tyQP_$j)2HI;5 z9gZ1Zr9AA;mYG1D`#5NNJZ;o{u}>ke@3XEw$<9NrLhl3{ZBd~ac3wjHi_bB0ExV!vmGcFYO4H;u}{ zm#PI#WmG5YtMSo{?BA$>D240JNw+)k{(kU>5!46e}q`Y3nyE8k!p)r$JM{k=Df+^X`V6UGMybrs*_LG!Ei%Y#Nhb zwCq1viBk-wN^s0m^~xEv!ga99S%=9EpdruwfL?MjK-%IfhwrGd;?7$MQ<>pXTG^Y= z%kt5(wU+O^QR62;A8O}&osT{zNgt!6wFf?RTBKVv$oN$TNpHD{1ky0diw7n0wDF|$ z&*1Xc4g0yQmZaT%OTG0Y?WduWYArsROGA6%bsjje{$yf-4xVtlzrDW881b-lanSNd z(%GwnfjBhn&mysWulu_fGO5JvV4K6kdvU;-=bcXt(faTcUGj8e@>ezGwO>p;@fFw^ zos*JxEeu;hAAx$`5JMxdp+ySmpljN!vJG=8t2Vant+is`$Y%-oDqiuPSi3T@bJr=3gSV-rc}mKNfm;9*?A=%E z2@1M97sV~UTeT<4m1tTFtG?FN5>#$Tg98x`tk)dU`p(yTqhev!?&)k>lC^jYvlSrw z{7~8|&qypIi%9NMcar&fw!g})>Gp(c+##;~xJ|{VM&*U9I zldisCv_4K=8#|SkI{6NYuf!jY;so|KW*L`Q&)IbnzDma&o@BnTN%UDEglk+X0BFge zSgSBV5K14AvwAk4l0=9*p0)lJM)V`bkQ^o-_GB;4Dlae?Mf-h556Pb>MmMWY+8Tl> zvRJ)Fo=_2L>MR4LGD=t&q2FGWJI^OVXZPI)yQ~0Sn${i=|5^=~E)~HwN0i84oMsXY z>buI5_90OxM^F8;UXD+j;9IgGsk9lTbJu~Q{qxJ>Wq(ARr_!~9HqV1MmQ>?4IGM^` zfL$o-XTpP^;?N5m z-zqJMT0r6;A)_uvKQmSL>_4EgLB%(KClHVq%iqhTo#Sc)#GdyJ73g_?zS)16)m7$H zTF9=#wS0y@>xjQab3c9@lLc_PG7-Cptp9S5>kz;yzaW7oPX8OEwk-_+18=-)&;GZA zU(A8h`2V#Rz4~aZC;L>DEeyy6|MB+z2RZqbLy&fs#(i-z&tD_J-{uy42?*Y3(?aSp zew*j9Q{U>7&!$>6jyb4o=R~!7tf!-+7c(XQG7~}9Y0Fa-w8ufvNKK#ETQB{{1hjQ{ z;0wIO7MIvja?#Kqi-sYbyt5&ExTTprW}WOu|FoKA&BRx94aU>RKW^^?Wmk6K#vVvJ zGddtsj-UhpW?J5hp~c?FDeA{=99fSa{<`*NHGTk*$c#$)4Yhm$I>cmkn6!T)A>(F! 
z`d=RPIxPTzK__a2tE>k-H4p|MJW{UTj>gB71ip5>Q~iyc&82%XJVo-#gNUR8=u9&L zxUX3L$EA6bmZ=%@t{;Vej@uY{+iZKT;iP;76^dgN{#8COcEOEoX>9zG)IEu|ReuTR z-yjt?nK}sWK&TpJA?yr<+fwMva-2h!LV?XALGf7U4y`jP6hB`d$c2vfq_(&(06qwG z`9l3g00fo!xH(a3;@E2-VAbZ5wu|j=f1GYIrtB8BD8&F}Yfb_sSjyv%M7RYW+_SoH zbGV^S*-?ZnG#2EKY?tPcbP^Rj`EEIJcEj&$#Wgc9Ng{q5^7!>` z!zu8mnEDNu05EjF6eDfVF6`E)Np{}6I4OL9~>kC3>Jr62deo%2nEXZ1jp{~mf$f3 zG0Lt=@Nt!MKz!EL!Px#e$$s*Nh=%6l;LajQKyjip%MM?K@`n+vPvADR%S~2E8Jp+s zMk4X_AS42yn~v}&L8gBZ{nNa1F`Ogcukb+4f}qJGgMbKXbi^#1pcepZ%(S6y4LMdH zJ{zwfqG9y4b>?o(+51`AAMI`n+99Zc_Co+lyKr$_{c& z!Nn7(K!1pP7iE(Mk^m&GzBi9sG^q0!6er5c6$HEtW697d^o?W%Ir_9O)Uf+)ek0OL zIM=8G5Fjo?v}G*6b(I)h=Gt_0&85FE{f@~C3wft^*P_l_kV6`tg&+84_O_xO47>Z| zifsi0GwIdRkjAAead$ImPyddM+vG|`YcLB%O6c;?DZdHv9~;d2dNtm;|97a_uS+Za zC0HEv6kIJk*!4aA#PMZYk-(XeZ%+Y?gXMnjyaZ7gU_`xUctv{4``rHnAW!8;bjN^)m1* z^+!48k0Ms0DfVfLxhsaqW{OGezki_z%IhT?(uJ} z2)KS+?BDK@jQqbB`}?>s{C61tzP$dsK>kl%APmAMiciuqwE*fX-V=XVoxk1eFV`6^ zYAuNxdG{@PcVXJUZqLh%xM4R}`ay3;Y2x03hb|73JEPg9T$GyQYDAYtDfR!uk}P=|_#I zlmLWa6pv<1g)2$9SM_M6MywPHB>|Ab>>=^DNrqfJGvy04K3hW|PKHb|kHrxHhjwg5 z@(%7DZ8=>Lp7z(&7upkF6&<&8hai<%iNvf{sRB}E!f}^IQ4_{?Z*JX zU!tFC(#M4O&_{Z2Oo~+0V|S6n^N4$}WhcJ@xg3Ch*!=uOs9(PQ&?ZZfBf!hVH_)hX+`qzv^FOx@dGd2nL7AROfFcx&+`KJ-CwUWk+ox4> z58(RZx10>)?J8l?84o94wtci=c>5e|bfH6d`&4qqcislEzxZN5#zjeu3c2ygojj<} zA|*!9)Tj9~NpBc;Et$oFRN(0BuAMdH?I-6xAMLw=qu*wAnQ9aS&}cD$Bn>`4;9!Z~ zjXr3(e+SLszS|k-Z}4ZtK@_&UXL|(RFpCXmoSTt?NVem=*PRSoqGv9~j*_ z>dJvyi)S(B@FVCCz{Y}&sa(DBu~GFM>$b#e39Wmcivw2Z+2&ZRvQe=O4|B}TrM!7T z((2>|AiyQ1Kl(u4wH9t{mBnIV-#L*{#L~E2f60Up?WsV6Is&2ew zb`y|JUqvN9@jD1lTE`-C6&obG56Z`$-*FkVDoJj7D%JRH!Bfz=t#u0!Wj~Y(Q^sev@5t%xJkYMYb-w(sUFHkEkNmDPeqj zb=A{9dvj9V8l5fq?nd29G83vQDDOc9RM;>jSV!(NAFWKkl^}?LeD4oKhSJw!iiTuS z-)H2|PwJjYxy`*Zad~K%ZCq9g{Yvs$sSH0U>3B?TSm6?ry9>zxNJ?sNnECAZ`BYn& zaN6$Y=~ujet;qazq!~^*2A!FSXxAS1dkvYL-(FGR+Hs%1!X^$Y;_XmqO<`I4WsZ|D zi{U?5aD+pBI5=RHX8St}HzUq+Sd%L%467b2M(t#{5hE3aw1efa2WQVG-Qaq}BAHr7 z=kF`K2ErW7-Xy3b%T_F{Pils}w zGUPF>+%9g3kQaHBZ`qR>QUGC(-FwXF%x{II}_h^*i`h 
z1I`K?m>XT_Ld8IMQ-Yj&|LfK#ps^p&{<-(D62!IBFQw$y42&ZHoUQWX=f z8#$(Jv2vKCEp2_bu=1C53&Tst3i6~wHp60@*Z7f)7C9GUhO;6oF3_hyKOjMDhCMAP z@je}@n8g6eonbXtG!2m8&SvZ76=~zD?Wl%g^NjDIV?{SKARawuJ|TG$Ze}%c4cm$| z70>zO0_`qxC8##MTj;3>VUz9N=m#|B-y}Hj%pL-=_EE%4RR@em-VZ2esrqb#T2zI^ zX5rJe&rK#Rf^ogs0*@a*_F$LB7j}d_&SFu~@4>X@&GP%Mjw^KvZ&zj9-?EG3^V9$Y zbWzxC1(LzszCKD%S5SgQ>%#?iA|6t3h+a2`$Gg5|HZ{3_mR-^YziGH2S6W_)m};Ju z*#@e1EmQ3Z%tIEWeftWVF}+Z0Dl@IpTt6z0;Jpu%Ye|yl?9sD{P(Z}EXyv;$uV%&0 zV~if%f&2~A%4nck1%88A6c89z1Mgr~cn7EJ-{wcJPYV{dv_$~IvE1{VTw`DA_(nQk%SRXk-D=u*&dPy!RNqlxJR6^Jn2x|B-M}t76Y`aVyqtm4lRU{=E7# z$o+MrWw=8A_?%112}f;AH+FlTwyUX?>r+6YJ&$V5@!hFu<_6_FBWKfg^2=UK5|?F| zt|=T!4kjxGpVH8mpv$)jRKjH2O0;S0;8_Y}5-#sfs5%Y^##iPlQsO3VLCBeguv)=% zoW8rSt2WmcVB1N38?h~FV|OTELvLN1d(6`vQ>Ym&4Q*uzemiOKcnQ}Pz#0~sj=5z~ zRllSUFljdRc}?T<&ArALR(Xhbbu^h7WH$E|@Eq|+4~LVUn+03*mw@o6l9oUGD38@B z_T9k<+Id7#Q73(@OEbe#mDj~%Rx0mSdQ=S4Rodul__YNj+^0|nkf}V#!gLnjW7G5fD1uFL-9p=V7s^r7ws-xO#8=H=xR0O4 zcABy9FDLy_@@rFQSZR+gE}{7=1spTmk^gE10nU}D6uP^0hsG2l@ zBz%hBaY>l>@;zhS6!nmVp#wK)lEjMU&SBYzZ<^amUlXP0WMy^oZPS!qBM^8CD`I7L zm+jqVa-7wlnXq3IY(bj>@0`FNHvMX7mNgpmeWs^pM{4f^O~qvQ2fD@K$N7vO$jF#y z9x5s7@G4Hb7Cc?9QW#N~LRDnuv0d*zlU)j(sKna71EWL8fpR^Dh*_DWF#ek2-%8b) z_$0V`bZ||tGga%^JqqT2)8W45zSMeI#^O~WR?6;$j;?$tAKQrQyiI{JXh^VP>KSNs zL|JAuo>8`2TKX92LFoBdcb(3QTdxw?D!#NlYf{tKJ!ZWFbFO1E?O*#|H65i{;})## zNpQzx!&BJ8I(=T8z8L-CE~f|m-XP&p+SG=?% za;>atx&>A3s_$4?h)tUAC0@>v5&iTDe@k17iQm97SV{waS504MMQZctqp2WU4+^50 zW-5#Azc@F-`o#4)q|%)Fux1``^g$@q(e;}Ry(ppx+L?%8PgO2t>YWTH4u zx(C*S{GL@e^;y>!#FI4X^&jbWbiMo2p1M)yc>b)7$8NPg zSvJ;DWpkXX8qW1MT>Fyx2a|;xe6|*04Z$wk-sZryeOBzATXEdR-vn~To=o`ulz`)2 zQ&@NA3rsF8t0X-20W)HC-1xOeNbjOTR<7A38<}hALJni&2CRFUZ@$-Ef=GFCbUlrx zz@{?{D+8em^7D@G1&2qRA+EqY>N{K| zyqNN_nQt!$7=1%?`xm-q$w2kuV4biwBt8~twLVq8w%ubtRvSVto;MjTCt?|haopb| z;H_5vM|)TP&Su(hwMezKw7eKvYC0{0qP4{`5vKKNE!Co8sijpr2}M$CsCK5RbfJ_W z%Aj^?Pb8sZ8C0k(iiApuHKCT2Nb)|N@9T9<`~3@(pPnmuk|+0i-RIorocr$Ol_yTy zubR|fgW!Zt<9BbWN&TD5*M0IiF}tM}!Aez5wQZLVir*Ccc13TBJE|0q+`Z~^wwiWE 
zMS-(?U3N8q@wTzl>}P&eh*5a~(suY$<)TTcm*$HVOiyl8d z)N5p43`(l^o9SG1FImnm0xyOSo)D~hh`P0YVZj$#FA=Tt3*-q;qkqDp5 zh;u6$1tp~>me9ZP0RN4)9xR+2%;~Q7x(XSr!ybDw^r+s*477+!wOWNC9Irlo zbKkrDZs)mR-3w07>a`nA3tn>EjvaCImYz4@QFYcX?&#|hwVD5dW+3LUo z$}&k()+qT>9F?F#v+dp+LSIrnW+Rh>w4vfv$mWk*{#fZWlh^|7OLP>i2?Ml+$a0_S z-aXF@O;bMG%=Nh~rCw^HJOa-P-Jl0P_>XE#0!pv__WRe~9~sStXIAPc@K>c*TL+e( z3FzF98hDwew9pVJfMAfDIcz4yo1Y}v$`K^k=5qTndir#{p0*rPXGJ*|GL~i5XgT)~ z`Q~W9fW}Sb&HQiqACU`rrX2?cbzOn%|2#P8X=g}%!Hx0zp9?2`nnhs})8cY+00FNV)>47>!B z90pg0KlschAV`=~*tqQ?zs5ZKFBozYd)ee{)W@^_Bd>^c$zQe3Uv~pVjy_lr$QIk& zT=>aojVL3Q2ho{5h?P?q80DBI+89aQ{Mo$jO?}`W;0JbXLO<*+aj_in zFOyiXRrk&iyhz7dKMQF;l8@2pf)zcS`+{f=rXJfJ4*~S^beD)$x_+rk!)t|nqG!T> zeXmu9N8I!F^LZCTf-J!wH*Qr1Ps;4^_UHCtw)FG)=Wj<+jS|XEk0aZKL)~l$?P>_r zgwTN(V#YT4=UU8l+H^nTXR$gY-?6F`iCa;ufoLXxq@!U{XQC$u&A9gEcd!&(#IZ#E z@;ZOu6xQCccU7gOZkr_jHl?9~uOB>d(r9u?Qs>Wd;&g6X0BPU(kM|ka=ew|u$L*OD z6tn2>c$p6>M{4)ga}S7Lq`(Qc`hZBuQa{i`GK~U&NyeP2O_n5fbhM$5U{_7^PIB)K zAMRh_N|-cq^p35d0aj$$DB(D@CXV~t1wp^sTKPwxDG<$C;asIQxl9}UEg4-iJ(U+B z)QDBdoqF1VVnWQY$t;0WU_Sf;rvCt0Psb`%m|6dHe`?$u$Sg5o1(VlOQW4N zSjt#$Rzp-ujM{0hqIi{=NagCv$ws{I&v}+^7pWkwOnr164=|pnMGjgEor~Lx2C$xO zaHaon+)wfIjLv`BjzyH7(1az$-JjD~<5O6W0xp83aSYrwK>IvC_`412uVTg=Q}4yr z9BjIo!3?pFHqOgPEtOuFwgI~{#<=sC6TkxDmA$cvh<50#EV2!aJyZho$=WxRru{=J z8CmLl>u<&)OUQF#QRV_5#Q3Obd^me+J{{1g3W??|@&FQSFw;THNYUv}bvka`zj5dU zTJz8y@m!G`Yfn^)tSIiuFk3-u(OiK8=E^`KcG8+1YOPEGZ|4;G1vO!Z)w zCIf^7YJIkdnZIZBBod$m65W%@%1!_faI#`;M{ zv#f_r4P+1`CgL_+l|t1+^RS=jf2bl`BJ0}PYwb=0vlXDz)4!6Yf8~SdpkmFhhrsTr zVArPm?|ekTD)ArTx`&rOZDFGlgLVE!96jaXJ9M-a9 z4!uS?`^t~R&%`G@p4fu9#|}@irjft3A5NBS@&eBO;=|j3wKiikX@AbleftuC&LS5K zl!Nkh3m4c~b*pE7laPqAmdAvSZ0_nBuS%qS*m%$I*g2jyp>FgSCzef9MG&||!gMMM z*(i4*8&XsU15CsgME;G}(Jlx93ou}@g}u{ESu$Qv8}KgeA>!z%u0SNqsJ);~vW&)a z3263$BASxhgbAh2`U;PFu9RS7y}Or(+Ig&n@k_v2DW@~DG@o^Q-L%de&-%0lp2l~p z_?r3lkJL6RE=CggmEe$1T`9)^NH{Cb;r)JTh%gxoKVnfA#w3&!_WZot<~VpzU=!PvuIBu! 
z?va^CMe|e!3Gk7czuP5OWM%JjV14R1Kq0a7jIG!FpNi3}b?6O?P`+JZlpcmVM}jnr z8KZE0;qR!o@Hqij=HEXZI1m)ImLJ!MsX�_F0&4(!-bb;>Fq)KZ+;W+zGk1QGA=j zhi@ywv3~;MJxkQEjgugJmqlfp)bm@V^#%=Rv(C|Me$-et-F_@8=Se&8Q)L5LQtp-C z;4%}>ll9BQukxzc@0Ff|!U+4^O78mT0fz1L)qyGUDk)FQLa zNxm%91Zxi+49p~>`t)_W-b<7SA|FG~{c=Sw5?F>8IaV(1V;9}>0tl^f(>fw-Gg3wW zW%n0zDNO$&m076T0#E%80vUk6L|1MAa(Wwp0<#k2Mf)Cz^|mTh#T2G@YXJLW`Cai zxxLVLjMenFS&1Cutv!!wLUDtbnwvv$Jzq0APmzyc@!ZOPDKv;DxxIOQ24YM6T|h;R_x18Fgw z8aNC)+XG?sFiy!4e;L?t$Bnx=^L7aX1Wv-hL|Gjcy?bM!w+f)P!YOh12?e((Lhm}2 z-Ni=-*;W(Q=61l0?#^!bv|_BW?=KznNkj(zPJCBQq=tq@qsqs)X|iez9-_}YSEa7_ z!;xuvO=KrzmUon<;lT@_V!5GPh2s163yB<2GL0Aba9E!JnX2qb4Nxql($z7eT0sStHJ~_n#`LU zaw)yNma?`XG1~)mCVNgnu7dQn-K*7iUi-B|$=b;@x+pF-cyjvHboT~B3x`LsBD7pj za3;t4#^Vf`>TxM2OkzeLa7JWj18N`-4u>PDH4Jtj80S&Nn?Q3bF%-^f##KO%Na6%z zjv~7n`$v5{wme$N&~;w)>PqwHa&2Qg;yjt3=?1BW1y61zG``VrYzd}Fm;cIw`jK=V zW49%&otzA`bx%>N;--H8yX;>pL0PM&#akUb}BTV51It&w&w9GfI?{?sIU^tIj;h6zQlPxMMhh@UVS zA?e$z&`v-Rt}Ts5)s<7mG<+Z>Un2XbAmq`nRsZ+n6OO!iD4>xSBzZTNrX2s{+Ty5SJm_(AF)?fbQi$l|t<3e(A~ zJ>1E&h6x7cE5o*_Y%x(#A%q)on~Kx<_K7cY+8lnlA9lNy34~*Q`;pQWw>BB_lGF(F zyZ24wP0Qj1G@ir$|9?wZ+P}a4->>{zW8Zbme>=>-V{pVZj`|-u|!?jrMYxcFT9p^sJ<2cUq{S#F=k}J2boH=ubL_z+q zXJ^h_Xgza=;49%J@IUf>a-nC=oVBr#m3^WhD|_dOqn(+Bwdt8N_dmLPRAKwe@MeoI zAFF$4L}-(KYeRbtw>*Ve&0lRM*@mvOT}zSi4v4}Z&^o^@uXCvT)HhKm`DfPrtgTQ* zRM^G~{;8%g5_QdzUU9-KR#F6gdb7w#lFB{#)q$G*@Ru!OAj|3x&qiD{0NxE9`h z4}&;-v4Y)sUW-w8l^CZnU{8{~xsA0A37Bcmsy~p@)E^$<{Tj=t-w1uNnh`K1R#>p3 zT{ZeK-5+*SFLgg#c&n8OW^TcA>G1YM%5!o(jigR%YDk^sv;sg@ipM-}O zC*Q?2-R4Xfu~o`u`{Z+%O}bB58!tXmdQCF{%aeKa4*f?nXZZZ=eAhL3(Sdi zsV5eO2Wdy6ei7|<-mh`_`H7%s`y7e;Q?Y?Hg0ahnd=vv`Q^5QSn`$eVsi>TJ0RARC zb2iB0%mwh*S@7>R_;=>axwLct`kxD}Y3Kj-H^JAVMi<>2x-(~_&nW!$@VVRB#nFq= zG>;kxR)blj3F*F%GJKn`|2{fOD#l)gK;X^>`Pv6LRnafoYumG2Vz@x)U*)Y&YsOnD zR*HyPG&48%*%~veMhq`#b=JD#we&|Vjh?-cd41;WxeJ6>?@0gmA8ElCb+sJO0?DUZ zUQ0i?!?D@x|LPPe7k?{6EZusI$$vomlGE(lJC{ zYtvd5-vpQ*>WDK38n4mZU=gJM8j-)ds2_iF;d&-779zISi&kZbG8wdp5sH`F9M6}y 
zg@tg6=b^;?O*$X1G@RT4ez9CV@C{-^#Wy7hXLZEo#I>Br8FZXl5uB&?oy-MAlD0OE zJ0YWY81o{6p7I^rmPZ?;leMQOEuZFm$s9AIRaaI^`Q?*ag`DvT;FsFa2 z&hdorehAC{WIL>n%P>|?nqoyoo`U@5$%#GV_n7i0N!w&=yTb*Y%-$KscKIEqQ@j0@ zv!7;xu4#8XB572cnMkIDcb>5F;lGZ;zaGzK^h=1^Y`!f1&E!33wT#YD&nm(ic>Wr( zK54yQrAdeER!^rrlnTTx1)nbXPnCq|j#j~d5aBfqSUj_?bfnkd3P42sb>c%LTz;MxwSa}nL;0OLD`}z^+H6%zgdgsIC!jZg9qQH&5@)4C6}9cZ zUzEWNMwQkb?FP@~TB#Rq6HE+Yx7jo~Yv%fj(1l__I_AmcL!1?-cjfc<(x?CvHHm|9 z(Z+%yt{_?ud*StWva;xka=Utc1k@0z54}QdSN3qodZR#g+~*(!I-Tq~dV^Fl+tuJ8 zgsYB5a)0hijg3Qw3r29l4zWZd>bjgXD1qH4npkT|C5Cu!xljpMKA#&b&>OPq9+ibL z*05p+D*Dw(qU1DlOFAugF#9RN`QCF|S723IN z6jU~T^Bd1XQgOeOb`1a0xLDd98~8?vz1WR+!8WGo=Ut*8U?$0He^J}jkRM{ZzG@i| z5L0P8*Hb+A9d9YC5oX%jlo-PCo%&bm}Z=<-IVkX^4QjrdL$V^1yYrMFiYcHA!Y zjZ0{8F?@u2YApHf@ipTQkY0+;VJ&>tAhH6_vVRpJ($vb5eAR=#$j+>MZiT(H=tr7n>?~<71Nw(%j(u4zR?kLNzfDTUB5bvv71@in2@6n5*XAjcqvaY zD}`9v7HjBiWlP~n%FauI?&-{1eW~?ap&!)oEU1PSXkfn+csGh$eHFu>swtS*nN-lB^~ZYXRwu{SW;jJprYs4OKXQ# ze!RaSONz>Aqfxom+$0y-l#FA6-4!g`pY;o}4849Q-*jBC0FtVdd zyY6rW(r2YWANTg!HFYH;B;gh>mCJvO#?;cHuYDxZ;(cLl*56yL1-m zoGR&E_Z7B$UcVeOd2=|RrB^;yG@F|TyXY5^*N2KS1(ywYe(Q2vu9`{pt=q)JruyvZXxqY5<{ufQZT*N+qpW>N z{D!l3v!SNvfoitTpN}mR-U1u3dUiy4`6bWBM5eSDFnd?OJ)%sZtQhj@=WZnC``Xg< zWGR>pMjB@TH63$z)RM&SZxpS!MHM>zxfaB9ubosS4<9!&u8CfZKv+RVy2!DIp1TYN zoiMW$mu)!efQTc>WyZoVeAKnwNBZTTpC4Pj4Xzc!xA$ZaSx!jrJ(RiOS`!7E_P91R zW?{Cu`muUSRz|f@#(SsZ#@dFvdTF83c+H%-K+7s z)t?mv^d41n98DjMUB?{s1c}=gsos_b(bq}kmk;h;luK(u-t?sv(Zgb-?odZIIhi#UY8F@;+_jTHK*k0Y z*v=D><~wm#dal6l?S7tp;4TMU_{Ey!Jh|42WH-Muo8oF{pl>!Ug?GP?%9@uc+1nFw zqMzPFKO1cy==Z+%@1B*kDN%GMCf2wmtib88aCtP*>@epU0=wEsN*YAx_@wu>^{?z+ z2GxCo{1>qw?pXC_s(fFddY$aB$9fSn*_@T)!3^UZYbRK#r@gd^6ZKl}#Z8oJ84!j` ztV>r-LGW?@axK$Ki8>ae)h?fm!$f%V%6$l!qNaqP?tgk)LQ))8YN0Uli0S2;@jBmb zYKJ&I*KP^ybfMsaqxZo`Hl@|)sPezC%%AObk8iyvFlp2W#*i;b3lHu+{*ayLk?^Z2 zoPfsT=XVza%#@lW>+S8PrzTEaBIe00FkR!nG?u%XB(1N?Ik?R88t)0M)NK5ri`1u+ z^6`3iZ;Q@r_2X5@61yg|@~0>HmcfN!bh$9Re}ebf`zkQzznP?yhsGX(r`b`EKloB1NoyBtJn?IkATa({ve$`Pq7C&#kP1z}LFmkfqQZqPjyCNOJirD9o9hE3K_5SmaX?W 
zFOyWC2XA|Q_I!weN8%Fn3Qu%y-mk5I9tPP!eh>M<2%kcXPkTmT20Sh#5%=%Qd&Y0! z=KaJ7_VsQ7WYQzq5}$)T0Z%fHlAkhLV~uI_zWGk6)#p-0$&?@X?o8l5BYvD&{v5(t z;1sRh`!=!aZlH@xOb*ZPd=`V@_luO-m`x_6z}HkINee!jp1zSpzn@IUU2bX3w0xNo zI*Dp?yZo63*Kq+~`N3+J3HT4k6@f5o%Us@9PnMoqI`HDb%1Dj4BPCQ^P&uM@`--OO zu@Ikwj9#_z2p`+c5W)4!dY?V--4Lcd9>LyMNNe&3G1_;#dK_nm)VLRu0C%0w&g#AJ zOo+t?n6nu^vh>GvInsd5c}dsnE-sQYc5x)Lv+Q0d9kp$ld#^~3Vl6gvsD4Q9%59A9 z4Hi}Fe07aq?p0dPT`h@^y-NAKpGDjpe;Q97tU|X6I2ggp?kH2O05(|F*H_-@!E*GT z;z{;KYg~`(*4Lc^NE0V*z!opq7*Y`&%kJ8%et>XkC)?nx`aOB+qKQKnETx_EM(hj6 z_nX)?mXs^Krn=GFOXw`8FmlffUZ0LMr8rJ?FE$tN9H(HlVR-`8Puz7~e4#V~xm|lX zIr(PT90w=%W8-@clhz3^vC1#jP4;U$z`Ng}asGX1*!zgm!ajV|eRlK_HrZxSM~N<8 zbJN6EvxBE)@sTUa{_R!+2@;}SRu>%%c0!OG9S5CrQ-w|afuP|=ryxl?d#SUiC8yIN zzIMab%pqZ)lN;|f(HJht=~kewCc?vB{wwk#Mt85d&KrkB`PBY_jYV zT23SN^GUwf9%{BJSNF$u7_AsZZ8vtzQ|be?P~oeCJXoDUSs~AZ2dG*eucQySf#KOEN(iZgiHUlwBGgV%XqNfkBkm*PfQZO9?>24RR`}_(;*Dn= zzQ+@d@<7uzR-mnEWA$uNn=Z8|^F$8f+ndHafBMxL$y*6a_TD7cGaL5j=H3%jW`|b zVR}XeYNj_Z@h$m7?&yjF+|&0H)9dY99SBV`(?*iPZuY^wrNkORkLLJU|9&+wCg-`H zI?V$OEyxYqx@o+nO}DkSYjQbXNas2=#3Da0$*nJEzQ&<=Mbu^CW*`YQ#o-=vk}KL}cF~)& zW>p3?oj}C%Ja}T_QY&HMEwfJy1&7mdkcI$C{oA&9B&~pJw;tDa`On>%L?OHS1hz!G zVij%kB?H4Qky&DJqjliE$dz8dZRzsWMAS&?K;mNo7H05ZK%!sr1ca4Ve4)*9@^IvFLD9bB#2({)t3}6O zAAs{Mou_Xc@2_d}va*J5dHr%(QiQ_tMi)-(r<`-9 zz*`Hnu5N9f*fmeR;=unLyz5R@`6)ieBVEyjYvB9gaaSab)31cIbuzR?i;~`R^ZVb})j*gi zMiXK)u)3GWr5bh31TJ)4Zf(Q~Ti;lRTTDj4zqjWVR6Dxue8)9Jw*G|=Xd7|rCI`55 zZseQTFX5t}ehC2n5Kj8W8AwJ)|bFt@{lG$JS#6O42 zEYue!RH}Vx<|!dI*7EO{+*bWO8y#Ea9%O3dJjYs0&x#%!?vvefru30boSz(;yBJhw z@Vi;w4NV{%h&8Zm_3&iwN_81ZC&;?jYxG2*TiglhsjR-(18$=~?V&6|aS#u|xUl7Y}# zvLPYyjm@cf0^%#7t0Cymcs4Extrfz?TT$-2w-L`jdm2-V&>z1SVbXbjtO9T78Ewdg z-ln}oay#1REI(CI zlF8F7Xn4`8#ei^KN%1M*smm2%I=Z3^r5%@jT9LeX`MS>{0E)TF5(|Q`-J?hOsS(Pb zxEYA)_SJ_|Ailc-z?L%98qiE=-e-IOJ%9v}lAI?XCVuO)&qI7`-RTvw&}ObLLusz} zi`?ADPd|2%x<WFr zntjPZnbYI;xZrQHbtdQ7#s2pvWP5=*oyHbZoJLZA-2>eb$P1kQ)M>yPKBx~bk2pQ< 
z|18nb*80y9frFU+pHl=5;{Ua$=oM})Zd}WU?H^A0YiU|Q%?*}+Uh?7y**g4Ryj=q-)FzW$hK(2cG}*NUj{0P>{5Z4)1O-ehVZ}fgQU?9Eb}t| z;Zq~Nan%!(oUg|-?DKe;4Cmg;T)8>3*elV&+pf%1Su*hBYqVGN`QxtU-=pRK@=ZS}Nt8k7;ce>E6Z}Bo zcjb5x69a1JjY_af2F1NrMeG}Nx=J(TwrGMBkN4HZ#;oy_g8^#DMu@Ub-(Qu)lH_K{W+0)+>43_wxr$YermOF1r z@(&w^8ElyUe9(VZ=#1b0Pg_nJ?PxK7#7Wy$?+$K4&EidG>?bbH1_G>-vZnS3bWMT| z>!7MPy2aRlV&Wdc_B6}>`iQE!5qZG<>WSMne6|!Ti5+nY_!1^Ajz~dp?}tyg<46yd zTp4sYBTP>p@W=~o+b-=&TnOG8+!&5YptISrXzNnbaB4uj zuMX}SFLWRO9ptZI4?-LQu#p!Q(;t|8klNAFETDmD3Ob>2L6K$I~M0-#W@AwVjd zVIGIk+&ZjVIg8qAq4(ydaoJoe50k=l^f!!oE?q0E-b=7Qyhv$|4JTTwLGDil7mZb8 z{@8@Gsfn*W7hZh#rEztl(Q06+WFu^%#;N;?4La6a>sPfa^{Lw?=I)aUm>y@{kde-m zESVVZzNqsnXl-GDn&bhyHDG4T&lPs?@MMcl{7`T3#!5w?o@208Q)&;~_F1!1t{t)C zx6JO|%(~u2crW{2L(N7wOmSi3cZs;7yBI~xKs zI$Ud^Hw?UGmhDnU6;Zp>z?bfVQMffpDz<#vzAY(}9xG{FLp)s@43LUlx(%o|CD)woWJCowM zyjxSvAnU_P6}eQ_zm0m$PFFObsrLt{AHvF%b%3I#p-+k9*5q8Nwhti9lr`M2qu7#m zF%~R2Bcj$#SQO1yaa8CH2H${WP z)L)%En(oX42$ZOW*3&szTsDySGIqq%_x7{egKaT}x)fY#AXajhYQ3+RCgXcbM9=se~=UX@E~k}eQYqIs&hAhFM5y?(!Rrp6U{ zDj#FwioOaxCMkZS0N!Z5+2S+ zlA$aG`fea)nNKo~e`+m9>+Yt##?_8=Uutr!o70&MdvMy{xvi1*$ls*6X*StY)9qkO z2;=3len7VOCv>cOBMjYerJXD8qUUZmyt9Nl!Od_$L#ykOr_FE~sZ}BTaRJ-m9tPHm zN#qLva-f0S($n_+kZBql$PcuwjL%wrio(^jaKB+N77h_OL! 
zSriv}sY<&}JcY4>`BgKgV2Q*j6eW>{F!^Wpq6F@gccqCZyK-CwP z8ys22K-Ybc(8&!&)h!;>( z1(EP-X4NSx)u{GRDxSNyYzDc??2wWjQg|W!uz?Z)2{OKk$3ee<4P<5PY{2QX%08wE= zJ=w_FQ`A?%O}4Wpmgy^O(SPHPtC@io_!ED%&eSuJ#9pL zIzW@z@|if6Q*Ez&02;S}^%FaNcf2wKO?&b`9ZElD{ zUr~#?SX1kkTWWmgupP^cv5^-vZ1};w7~X3*wA%X+E3_gzxp2ZdVd%k4;p31w0NtSH9M1GsF)~pn`kNRxhY=Y!e<1T^eEU8yoQB$LxsR1rM-wVE z`o=DMQKkfhu3uUXsbV54Z2Lt!_>EimX{^3C+nyBb-g#0YO~VmON~3s*Ooe`!#SrDs zy@~y3*sybXA`TK1bW6-tSuQ5V%ZT76P_4bk$uTOtV!ngQJ5g^h=9|JSL&?7VxD>)W&j1TI1NK*kX%IPf&MYqxm(QOE!O%YIfg& z6MOAm^Ubl=LEuK;WemB|3nT;cY zAEL#PRmews66Jp$C(W$XYQi=!u-9y3qXSeSO2oxww#hmPWy_EgGYh6@@?1RoqtDMq zveN}juSE)Mnk~uw!=YydSh`C3Lehz0fzRMjl$Ia-T;1Mz8o=QX00)uvd2-Zg`|=1t z54z7?;X0=WlzA1{vVdHt)BTm*2j(DTfm7(TJJfR%*q6*FE~=;dqhJCS_37xe**};A z88G&nxm^mUzPRTz_+XA)#K>`R$=?=L9UwOeG6N!~`_qjEANFLQ;Vd$w~Rb5EpPy)!44;fHr+3qZEaDm|Do=jz-e&b{60!p(~ z?~2%gtFEUe<&(@=2Gq?4jW)wn@9jsz3v)LbVD+S(R(-*2oLr1be=XPH4p)2SQvGis zI;g@#_KL*6#1{3{4pvzR@U4CKYSDGE^+PuIJ8x(z83@ zLU+rQ=!U;0`S%IVzT%fl)Ji%hVZq;1jmanP62(O4&iye8-TxESU2+yx(#}a6Cbs>P zU-(114qz68Y0W$@Uf##nbNS~L)(P75*4x&zvKM{xBL5|-a>68#P;xu?HWoav?e2~e z`N?bA&LP+ymLKw^^;D{kw7?O}np0_2<}y;XhTrqqHk$2-DD_(X9yC#l#rdmA?TQ2Kl=YGNjWE%s;^{l5>;a8Qxa7g+ zhV%Lwz3)CNNE-2U9Nq0Rp7PR#PXj?Tv2n`>l2orf9VigpAF{#67U%b#DRIyZf{?*k z1e+?{ig)U+r`p3(UkyCm)|(qEuv+&RJ2xN4J>@?epx7zc%w2-Tq>}3 zdxRq&s3fnl1l6x);)(7>X-Rl)zYxub&n!$dl1Y)S)ZrdZu08zZ)J12Q@yGZaTYiz( z+=zSN<}-)8yJV7cQTyIEC-c6HB3wz1^!~#$AOs|@PR<$I-srCXiu96#n>oMpE?WgT zS}H=L_njA>drHDah{EWl7%Xtq69tuHm(LR%Mrbx6sVViHKmK^x*^{Y~I5KL*(Sm#l z6uzj{0pvJiHwp_pgv*9ue~7-a!D^mq8;5FK>0;5uF#6iM{q~cS!5^MqPw`8%tK+~C zgRxs0!lz?7pl);MO5bL!! 
zVMH;TSClx)|N4&e*%~nMfxC{WVMs+^)qB)hrI=XaU^Bb<$6P3B_sh>8$y2Z}*JGA( z>wl(@G<@huOaPn#H5D!uqdG~;0!wAvmBOZ;;UFNim^&NL`6`tCRYJeP;cy;D- zdu)EPZ;CX3QTdp*L8mI(h+2(=?U&({ zfh;Lr%%w_u)aPrpkHpFLeeYYCANB^$dn`-AZC-F)1{P^?uEq+#hq0Sqc5vKzMXrRd zuxWH1-y%aEY&1Jd9BzOp=&MbV)MXmGs)+?|ubp@Hzx{%O{1nb4aI6HZYWKN@dmF^;EoglIX0D34=N%n&xfKP`6XJgqE38a{W;v4F8|DQC93N zupvc40!_M^=+6o7MwdF0sl=_+Vx<)5w}{PC+#Nre#>QI>#8s!KcXz#J{M5ByNh3RL z#GtcJC_8ih0s&2tngRpr*ES@_rt&!XpzP__d~2O&X$A3>4GQGEqP(nOs%_C!4}NUz zRDQpNF%_IWX#OY&KZ}|S;ks=k&(}!YpM?umcx;^P);I5dR)=_GP~q|8f~v_YC?}wm zteprGTkb5R)D>Q?is}bIM*YGAwxH=aO#CRvC6AU=drK<8#qh<7js^vmv-2fYYK5l6@v0)zi}pJcEtw#*vSmP1pXpE6 z7YZyK$h#J*V0EPbPj7iZaDH!+WH7q)`q*HjRh4m1J)l&N}AX zjWMvgJ!TU5$yc8xwuhKJcOn?1%+FVY3;aQn)3$~Cm#pP_{DggLhSpb@@8MJebwhCr zO$wdo-aJcYz3Z4eWV=03j?P{FI~R2`^11O(N+sgwoYiw9HCifDY}PF_o{qY{JJ(tQ z@QAIwA1}W<*Um`s{Ct1bV8!HS_Xj>`p-!@XIBtOE7Ww)19n(JTxLe-k;MVYe8-<*i z%!`?730E`7@Z|d8`EDBMe1U|17%_%?UP@^Lpg2axR!-_fu_57*!o3Dq5&xSLY5ig8+V{2=kzDAw5V*ereE`y<4dO1D5P6X4T z-mu=TM(POh*)6vpS&OQppq8sxs>Z0cV(NezUR0x<#2cEqaQ7l<%J3nZy<+0=)m8p` zb$<=fJKC=ljY9PXuKb)75z_V@EkfIhW!$p>SsnXDLq;KtI2GnlSXYx4hw=NfZ9vzg z6i0MW3SOO&uG$h7HCw*$@q*o&K(q4hLLQ7SWPSy0SD&aqZqc%|ey&8jc&S3qzv^Jc zbLE=gxb@Ux9f|%sCm($elm=?lm{8k^vtl6Xng#(&x?1LoD1V10MH+ANx7F3>N;--G zHflJQ_jXO7;VoIhew!unBB_WQp8rlr%a7d}P2G7eLs;!$tjMFCL)=5ubhGvjZVVAr zP%+F74vtf^NK@D@pVy+kz}&xT0erw5A*=hXvDnCu8u3rv9R3WxXQ7KC`8ladD_W+8 zlL-46%yj8Bz4xX{kA0R(rCtv)#^LD4F?3@Nqq_riX(0Gko z)lCRrh%bC2exPO$5WTUZn=04SHW%T_PzhJVL>ob}0^Rbq(P)4IlT2mj3P)zz)%&<_ z(TXakBuV<5d09|C*pU~`=DQQ zMco+C{#kQKEz}Y&#Bcik9CH5c^cOgfaDtP z&1ZFsahCQ0OH}G>e4uKB7Z~&wCOhUy^4Eg2!Ss0;%Bb+_+mw+rpBaSc-hL8}$N&B= zdz9f!PUpS(e$mk7BEM6ohF8OuaPWgblQlLUxanFGWp|Fz;3JH0Kat)rL$R^LT$5u5V?BG?%X#QNqGM2F2}az9|kq;%rKyR^|>JR6;hKKCAn z{p1vClJRnZYtP4eE%B=d9(@15e3J(nNNJ9^Rb)QwdxS6fYoX6CvtR}cEC&WN{1yU!MQ`s7@kNtG(K z+L?~qPrI*o%`!YX@r96kjy|GcDY-^YHja5_=@U+)uO>JtW~AtWxszxDtnjIQ)8ls} zK(DyXk60{q#{s;4gS6tlcNV_0ruI$34u^(8v{BfBh7oiyOfgKkE^vq| 
z-nlNPZ5wTu+!aDzpAsM6BWb{9urcmBl6<(dft85b{S56)RLWz(ZnUhneu-R2l*?C& zYrV17^1FyeDM=}>u-9uX1#_EKfXOEK-WeqhU*c~1fTM&iCi<6ECBH2u-&JSC>hfN; zgs5OJS8}hUyu58QVwT9!c-typcXj|ofy<(p#4ja0doYn@w7{|GJIxtH*YjqmdmzHB zv6dDbbxv+5L@eHnHSl@DS6iU#)~F3cFzTtCAm8+53--jr2=SFB4ru4G9=0R%!_o?cF=76ifby=!>+ap!)4RxN7t ze3yz;-^233ip=4wa^y{S?jJV6lT&;TGwc^E%(LP=*WYI;k+xQM@yKm!;W(s!$djw;wZ+CgSKe9=e3YY!^sx{?Omqhv6$W6C zQu7h0F!RoU&DJddUUTB3=bB5mB)!jb-X`FHwo*DDM|SuVrPmbv6P98#I|JkxVGmVR zZgIa{*CIw5Ahf=}W>3L)3gLwWK>Fm6u;XIJpOAOqlG`Q)2S)ZqV;)vPrg8eB1=LAi zPwx0UPm3<;Q)Dx7t=}lb$q*Y~iP>Rllsnv)en`A9d7s5J@|$H|K~jd>-u!Gyy8xxp zxcL6+RJv~{%y>q1vwk8FGa$~cA5O3QQ@q=y8>6why4d{j9&mu(FTT%QlR*|nHI7Kq zs~$d)S+?;4V8@nE8G|dnV;y|cjQH_F5o!Aub2%B?y~x+zN_qzeN|MZ1047A-_gVV; z!6^|s&&8Z0=9~YIvwE4@%2-gql$tO{ay~xUl{wV1U7{DT$c<#Jgqhk#2q#Et$0%%Q z-6(`3vZJ?LT6Z+7TkeRU$t|}c?&=ctT(|PqvnzkJmtavqo98S`{Df~hQVB*l?v3Os zO*japzbSm^uim&z`Qkb#D}Y`7c0Sw$LjmH>MMwNONbNpgPa7XmLQ+Pveg(vvCG1-+ zny0GliCW~$zFIzTO2r5Z>6!Udba-zqp`~bmosS=g^f}si)t{1&pCz4(xyvr?2A~E2 zndFYV==#iRV>ppj!4?vrGsz7Aa)LaQ`{Dj&P}_T8wJi^p^>7Hd|JM%pkOF;AEPj9U zoAAYZa*ep+-!dha{z|SK^9VT#`o0D=As@Jo7@~VhXL%VI>92o)y^-AdTtIG~V_=-bT zu+ctKyz=#TkUO;O*f=&_i(OA*xGXY|MV> z%cA*!_tJwI3O{yG01dGqQxY1fHE1MHp+p?XyAfPae<;e-O<6umhX`~X_mn!28tblK zefG{*K*?)#4d^-Y;qa$}l_QS*jA@>HHGtB!c;_-IW%s~s3&gQx=`%s#wWP#KT_kaaFG- z9hJ#J?LDH<5e7LLffIlxF}7BSX5`+{oZKl_f`X1qouj!)3Uf%?97$pt6QQ zWl{%}c8g{VI>o^?i5r${rUp&?QGY(_7Z<7|4qAMJemlh7+zd@$0a=yT9HmMkM9gJ< zyCr7J`2XtkV&$X5?#@E^H&;uBxatHwa5h4LB#}?U1eEseeizAlB1c~I?L}`x6ex>N zBnsq#G>AOA)^_(!y-SCydLHNXXXnIK>`Z>VyY_d=7&zLj$)#47oOE()URwi5LuFoF zrn;Gd0zG7;+?I0ZRtw0C0NE2s%3XMM(F7~Mi!)?=09gH%RPwvn;j5}w0Eg<;aZszJ ztRPv`g}n+E8gOxv@nB73U?<;szK_XU)A4+6gB+7xFEc)elx9a3gas=AC{UxHN%S3K z6izH0T4u5fpT$eYH*y|w0b^Pk@t-k^MTpf_Y&{+%n2jyhW>xhoGr>Ms}PqvPPj=mSp90B{sjwrzR z#m^rLIsSe?<0#!=Pwvd|XODgjUch&<4?xiUzx;yeNAiMe-97*pF8ykJ>_q=PmkT$3 zny~_{L}lKd#}R2|)o<2k1CmecQqUSFNvo@OBCh>3VYL}1Y~^KE-_xUD&PiS(MGsLQzdQ~nnrcam%V!lEfuGn>er;FZ-BRp$W(3HznBI~Zi${P=17!$E 
zdDcGIbsz4b-_3lq*EmTyNfTZF`&WP_rsUnItxQ96A@n#xBG_{Q<(hT1B_fnwh=CYq z?2LK{XE^m^I3Z|KPDua3ZkoxN+%{dIyXhSbihRjMb@KK*v(CJ`X56sS_&DhOjam=X z=TuIi<4*hqLjGLo4n-fF_j92B>pS!1Q%0HXQhRKu-wVysY87OTK9x|RF>j>xb1D%> z<#mIlA!mOuZ*@IARLM&;Cnj0Cot#Px>cy)v@RQr2;p@Xk$>wuZMzNpy!;Uhnixv2w z%U1!dyJ4QCMp#cudrh~}f?TxBj&hgiJ>_j z<3@ieDdKzp@p`9@6LtI+_>@j>1o@ zEMOp&(u{c(@XCBp{-?e-ob_}9O^TD(@`y{j#PuVio6DSEP(|&BDqa6|xizApqo{#- z)n4(`#On~2DbCyuvFk5a;f>s08V)3-jg(SGGw|Tl@cgcWD1+%G>K|18yhqtsD46b}6NV8O zwKss~yugk)&Cov21TDNcjl5HfbU_i&x-qX!wN4MA?Iy@CGpn~GKZaHOO|@PI=J|Tj z;_-O===+|;gO;{Mt%6hK)~?=v2yTIMvHB-YGwA$*I%tVM$kjMKL;qQ%zgPJ`i}at9 z^gmsp|07P4UquWmKNg)!?@L2-loTxZ67a<&PMregIznc@vY$@0vZRtqk=(joG3*!u z#H^(;TL-a@ppL>887PyX=kXJ;AB3eHyh^M==wqHJFaob0o&%Z}5*lIdB>Tx6^&imo zPGpdv&9HKB@ieq&sMxp>R5iUNd9YZzIT$9ozG@E&E5)L1W=6Y@sx5HVPcAs4jl(6b z(RuwItJ#Y!T5rut4FKHn{n`Oglr9K^-wi*?!M1NNs0_f^Rk4oNfz*0JJRHA#6?0Op zl(v54BT<8@fJeyzS@N!vKi;c5zYw8w-r&H;k^IaAgmTe}q^AJRX3T!7(-34C%zNh# zU|@zh8L=IREKtnrF5(c#F)srRn8NwY#b%14)&Ae9>nZih)TEMgudU@N&`;>pCR)!hV8jeyuTxj7&{p%X!GLy)$da zi}Yl!bDMe^`}Isv55*r;bpnR%0v& zG7##NWoo+XcX)|ucDy~3kBVC4*YpK5bw@!l1pp4O-LuRKyk*WV9?Ty26t4uz9!^aB z{v+@m6jfwmuljw?FeXOpfm>(g_#uQU+Kc#3>>~<;`|Q6nv->w3^N1%qL z4l8?djf@VT^#>RwDJV$t?)#I>A5KlZHb?U1MC+QT)w@TyJ&>7MCVCMY?)MAeayKUJI{Rw% zD((A2kpTc&WCjI68g^~3(==a)(0;f#SXkxQ_ZKdnwqJ{hOIV0&Ut~6ELzcMBv0VZ? 
zMn4H*Kfw9K1gabf1Gc>+qO6d_t(n^?T&e~WH5*-?4N`@0PON6%G^l#Sc=NUcHkE*m z*gVNWm1=#XyFBc@x6|6A*?^d+C&lTeZ%GE*15NDewxFPXr&Qn7^7Iz;aX5(Z>V027 z3Mm#Hn$GfRO*AFRPb12aLzq}3%seF$Nj=pZN<={C{HowJ`|__!2%cZOUF?oIQy4ee zYQ>xAl8Hkl74cGnS4PQ`Pccpji<1QOo2{HJ0Y^nU>?8{JLAue9VUT`E3g4hz{Z!$+ ze4Z`=9zIcsSZ=+f3&;KFH3EfHwB7h_&f=KhSH`M|2p9$meKf8fNB-UamU7NZ!Ah| z#CJuXA#6Vw#$nc$tRTIK=}C_tum{zS0%lWuc@T7v9K@f`8;#^~kjOgDTW?E@ru|I5 zKFjQgRbD>ytsJ8=@ZR0MDks?VRHyjI@A(S!VKS|tjJh|(f*FEPouUo{at01EKh2dGjcI?;o`sMBi zwSvz#-n3GZ4v5qkK{v_Ahi&{j^Qy+rwFpdYcIU%!wLaQ z=jw^A7+VNCWQwVOj&|TzzeQ_LPzFr}k)qbEEecAmSdk`N|2!_GaC= zRKXM3N5b?P2)!zg;!bCA*4yIWQy$C^hf7p`)|C>LC&81tdhLd~PessJjy6@>)fb;cJ9@HjDJ7Ask#4IHB>9*Zud8C9Au8d;P7W5uyS- zA}!;^t!$xw0z;(y#-6q$bsKxCy;nuRpY{8XtJ=@c!YcCYN7u6~!-nQQ3Nz_noDo5O zw9RHgP}eoO2xq=12u}^F(m}dn+2WCQ$X*_?o(+63MpO^ifT{IAQY;=biRG+A$x12dyD9_R(VJ z2UO2VZubentGiQ#4BOwIqaHq8)+H05<}?}uuLIFPzY%?@OL&A=C4YGdJfeMb3QD91 z*=M4FM^`(~G|0-;-JcoFIXP|m-VW$U`r(@ar~McBkR&cvRs|sau^j!h#(oeixOQeL z9HWi~pOeU^iF#-GomRW*9PbU*Ym=>~SMhY?JP|&%vHX71Q<) z>VNbVkMOz-Xe$^p0XPS`$9qtF)gt|hrK2lNSB6yCbP9LIeFtPe_>FjEm)Ym$;Chpt zw^)Jbb5xZ+R3CXXSTG{7=`lRi4IU{rreE!EM6Osp0+&(`xY4}%Hr@WnjR@NgRVaUDjH9!fQaoSI5J zFz`Qn@g8$SeRF{3L4*iQ5J>*Vcm_dGD3UKiuSEX>1zf`a_2ueQ{1;3thLtvD2aWSv z?gSQF{Kw1bsax8tCJnWH^~GBGdXIbfp{+$V2Qhb_QgpGEG@3h?oABNItTr|TWC0HA z4g}PahfJ(jvp2>og!HHIc%btWRqLCwLi^pHdds1iHUm_X@f=uk{E1^=G1_sRH0b2I z#PQ%=EcGP&9a_5I)9~nxn;E<(58{JqgY zAIey-VrLH}j!M(j0t&UeX!ZbsAr$2-W%RwU<W5n2OcwsQo{EDZfIM&1Htgmi?^mh_ ziH8IvzIR5Ay*><;coo}F7D-BPD#kq6+K*{_TMRdv2>Eb!K>~gJy?|nO3gMN`SEC&Dkj z2zsBDg%**ptq@*_0AW9qYk1i8XyIw>v$-#XDEud7*Z#3#0tIQ_tb@jTjT>9e1i1np{gx*hfoXcXkfeU%llvYX-3eWU0yD=J$QofH{PUA8*Fm!c^n)hwYWVb`EFz z2g`BW4!$+f6LyY8fa8emzT8UEt#h?Ryj=HKR-IkXw@B-@0h5o2Vqsik1taT-Z6w#a z5!JwGkz075KU6lt?>Kg}2<*Pb@7LS{&?6z+)h2!XYGfYpsba8WZ^E?;D(1V|)?UVg zhMsEeC6toi;z?JmgZ{*sI@G%ii#t@OwoDhy4kS12y z4<880gyw8LS{}@5Y1J;!Q2-Xe!iQ1QUDR^Zb(HXFJcEPrx+R#eZh3|zdw^)Ui<#=k zcNI@j2n#?t{_uUj6$=vx#0W0vs58V79$OX@Tl 
z>DLvFa;P?{lWomdCprIcj`l6L?nxRZ%r=Rm>EU`&ZQoaxX$o7X0u@*X37m`jm_|iK zrTA`zyj~V4)hU8YgwxGafzSc-@N)@8w++5gcTZaLo~oSXVdk_IT4k? zeUA*t*Tjx?EcO89;IJ8b?SiAjv3&{gv3jUr@r;_Vd-U(K{r28fv{dRJG^Gt^B}mUb_IUGcQNJ7?%v~{5Cf$AF(A+ zUva|jWizY#+D}A_(`ls58c}Rhng{r#z5Ut&jXaI7yr%??-#*?Aof^aVSdWgC&8IMb zJe2D}Z-2&SVK-}X>U4V0o9*~K)&W3HRevP%v0;~iPZn3wIO?Bqh=={EJIm<_nd zP>}WBJz0N7TtRddw1%E#2+f!Jp8cQRBS6xK6e8rLVKgi#Z|UJ*^dXsF!F3NW5As@k zsWX;GUUjyIj&LALtD6_m+7A>!PGeu!CL2Q9^FZ`inE;@EXy> z73q<^!%Yzg;4Q7jEbk4Nn6BPKs<=B9FA$(K&boOBh}WLP&Sm#zc9MFfn@n0+Xq8Ih z4b=VMPRdEh%rP#mYY{rb&ia#z9If7N!~dIe{OK!YjLZ(yz>ME%b9X2llSw9SyC*wC z9@tcm&1Q-PH(Uv(KRTw}EAXcKLAVc5+T_3-8kU?MTs?Wdb2UATQ@8kiUp?}ZSk0Hs z$5u>KE126nLQRMM=#2L{0w;|e!Qwbxbsc+o$noAH{uY^+McH#%SzJhRB{oK!n)kCp zXn<99uSu_Qt&8~pEK$X|VK+`w22ovfcW^{C@DG#G{TGXJWOo6e;6GV-DUlKGrrVRm zr^MMaJ&*r;)ueoQrHc`rJ%1XqCrGcrwlV1lG$<_A6re9{>@lhYxp*+$a`-cAXuQEnrbFJ&t^;IheSOOVY@& zcjwH@^ER1tTW%SbU<$t>$?*l;S{Bs?3EhudEt(oyf8ik|CMun57j4iv9yzZaUkj2W z{)Q!g+7%Enq3R?kQT0gNMG|&vZ&>{pxJtkhlfO$eS#=H3Z#nRn&p6FN%$XkCKahzmvPHPx3 zG8S!lRPpI{#BY}UFB!Ffx2K;W$s%4(YE-zVd2~0ze*X~21LG9O9oZ@?M&fFKaPLaBxbFU$M_sQ+tR->~8ZK^_m^)*}Az|NH{Gzia(d0(VQfMLDz&PssBiVPmSw^zs6*&>!v_Jt85z@h0!>X|xK<5Jlu-Zz_vy*aEX8 zP3id9?}!+lEE#@Vzew+GyE5cC?#a9NlHG(TibT#}#D$odaV)=Q;F(b3drodg zW!B$zMI;Dc_(fB@@`hxhs;b{oR-r#1E+f8ujtMq;#bbh{RB^^?w& zpI_Rzq))+fEmgcF>UXF67wD>iBpB1@w8V;rbm%-`YMD{Te|(KaNj!3bi`e+3GVV|31E#@CU8}=7LmLmt zkhG2Zs_phn{|`eOXaktcu-6YQ|Mb+hSYQZh?KmI)$--YH2YND~o~`_+r`G{<{f&qXsyzC=Pn5^6qLgzyZ51eX}y|_;|O28!GlEFr31V z@5z3T6>8_OSXsC8z{m#pPE=I6VZ7(7G{`fX=kI>=0f92qr53D0ZaM8b*D=Cs?H*r& zMvcPAy?4~#-4^R?fPg>G36cEfbJ}VG+5r^v2^C({hNVuOnCJk$20bkNbWM#x46#1D zpvm@S{C8s;D0sF1=DT(c07Eh#`fk5a-%Kca4zy7Vt{yn`$?scu&h2RADaBXzo7B?3 z`#{yIW&mdv{d~Y>>TbXMg6ONu5s9G0!~1L+4-GrvdyRXwri1>`fhKz>mHpFv#cB_CkW!BK(x0Ub6cqVd~~G) zGP(spC2@o;D0ea|SRshJ-$csg?6UqIpn0zQePzJHb>Z7%A%`G~Pzt7x7C5nuM@l0@ z125}b2lF08716p$aU*M|>dKP?^i0Mb>nRyU6MU#_`pOAN?s%`pg#3<2+TtZSc@$uo z_;m@n5|?{uqJamf`aNvD6|$copC_lJxlno>|NKsFhjRQAgrWHj(FcE8hoph*0K{TY 
ziJxwhk8*{?rwH{(~4JScs4{2f+OGR{x(cM41Wh}5dZ|<&3D-T@TTPR%U4;Hx;+>3|AcKt0kE(iAgl9-Ukz-#0g{hP*zDA1{sTUF z2|%JHys*MQ0h_n10I+#H8C&oh_5_yG4oEVxn}A0D_oWS_9W(*Z^Sqi(_YboG`Ysg! zN+e+~{}ZlZ$pLUpn}crg<=+_XN@TREQ$KM0tfQxuc^jFk9D}<}|pN8(=iTrmW|J{-Q z?#O>l>%XS;{|+B1AM!)!zghtQAO3P&Y~h2!$X7Q0r$r8z<^F(NNdl9!6;mnVZ&lf6 zf%S)MSa1Hp)q_k3pML`1`q=xj&NvtRyGsYW_QAhk=wC4OFRJ|))&6@t`M>;l5;-kx zQhw(uU*I$X7t0mudykBkGYVi4cI{EK6}iQ-KYP*{!^$ySpc@U)?-lozIKvGX9{lcA zR80ecEYSxB5mlWMeK!q*A315$_q?{Iz>7vHER~XqitnzRpBQ>?;Y?!APqA=RgXOx; z0F1}FCk{|TyUwC?*WFd1(5Oza{ajT1yMp~tuIedajKcIwAjq|fTfHRYx*(rqbm9%% z2Rk4ieP1O`HjI{pz1}ixNdu^c3^@HqE-b|f|1lNI=|w2hV#hNJ)pt0LQBP)hPmL_J z1PY)-vu-1C<|nsQ!uNX%m~_OsW!20=R_CxkluC$2TRs?lzlWs|>> z=GS_46)w(>gSYxt!bz!M6+v!+;ar;ayi(%^xK(x zYOc!#@F+C^%2DZl^tC{lYd6JxvjDwxdpxeEz$Kt(gZp^ce{!vwlP{^&94yIh{Y83P z&_yywHJP!Tm;C)|+j&wFpW_`MtxB4hp86LsOraDE!qc3I>wiD?f4VQJR-QYc;qm~_ z@114t!HzVxYuXD_hHpWqj)wgqkq0vPIpB!BcU{?9c^a!G|HI(xK^HsPv+5Qb^`9Sx z8`Zgvhc%?#31q;XQSEh4Bc@jlZll4tUEaY+qmAF=h02%Plp_mOl65}e29;W0e~2{0 zivzAdRgAt*o2^TDY9aYEh9zbH456dKs+DWjf4=e0JTxed@U_<(C@E?o?@C z%*fTR549f}E(5}t`Mjb{fsn4a9u(8omug2?S2{p~Q%M#`wjXElQ4#Uliu&=jF9zVw zBryiu%TDn|9{tE+hWh&9&LhfO^lSiC^?OokO)djJc-4@i*r|F~^LaU`hyeyb&@B_>|7LU%lLl+*!C$ej41;IpAaJF-)zI4^c3# zTmuC9S}n)R7j@{Zy5h7u&cRMrF^l&jjt>uOMsu&wUgW70V}#BQ$G( z_USFAcjK%Qu+2xN_w}%0;;GLF()sL$9*t^fucbnZQ#^EzujpFiF3!3A&q~T&haaw| z88!I_4`19X8gU1mk!up{{*nwkmdil90Awjo$tbW~)V@SI?>V;1EjBbiZ+go4PhDi2r zkfu~iag+OFaPE?lhd?X!sJ_m+yOr=pciCTB{uD+>awV!Ky-8C1Y{i02?^>x@(+6rc ztw<841~owLnwI+|2B@&=bfqFBjp3D#i;B8a(m0|!g9B}%?^yRHZQdTKeI~tn+HP-7 zq6{eGbEF^OlpAl}HX$?wPc*If&I}HVzToPQUj>+NY)xtxt$Av>L^${{rf&W)187#l zm5V*Lv2&$R!S2Kal_wWvv zF}ds<@Zi8W>=fEnvu{f^t;nzj)fyqn*`%7V;!r`GqbzpK$orzsI?J$z{>hB>;fqAS z;ecr*R_wuk_i2lSN`fG2{9xrSrQ-G^RN-)A4tB-%(MAPcx?!M&Blci0R@dl0cFGTK zQz|&+ez5ZF_$wiBb23gkk;lvATPyLE+mKCXJD<`uXq`XOO?rvaYnj&c$~7!hVC;oq zwUf&Jf)Y7}PQKRTF=o7)2%eV5$cQuOsvUheomjiD+$`HRazCfiX`0*YmcuW`yssog}M66+p7hECt*rhn;F6;FNk~D>O9T7~l z`*F+S-#l*T8a-bAR*7P 
z^%ql(=(E}LNq;5$lPt<_7SN5m*@NAJGU+8(d3`pK&bX9_d-0PedeD#;zk;Oe$=aN0 z8DdojKG)z=V&pGtJzi!JiYpFSr?YVwuSZhXl7l64LTvgv!;)*_N}7&2%@Qx@F@9rt z`&F)Jj+$(GkQ-A$%WN0tHcOv4HfZ>B50YynK9u6Tz1{?ODG`PoZh#-Rwggj((Rq!o zz&HwjVcA@@#bbD$7syTSIu5+}XUCrbta>@QQ2QJ&&O3&Z%}bmy}gWJ65?5vJsI%!ux?K0W*%_?FhhS9ZpK z8R_0d8Yk7H?8P4_L5?rSv)bRu@5n_y(Zz{$v)>jK^FKMbwQ5OTJKN=Vy)*#FA5M?0 z7!!+nvRHw&b+J9(qrNZb6K|vT$t9W?{hFNVmXl}S@kW4SEgrjAATj#DUx~}Mq>mmk z4Iz4t?xME=)Urg`OOP`H{hZdx#NV7&Cd3LEje4d0$(WK&(g8K4tl5zA);nM zCMJWr_j74_PQNJ`fBS_VAjwYpl04=bC@gzKsfbLf{F38!)xO)OrBbH8wy$C2<+?m) z`K^MdFya3F-N4t&*}89tADeh%FahH<$#q}Ek4jCO{Y?1Rgz7yvnT6ds=oFw#9@{UJ zUI*XzOsz~qI;~su5Lr2sVTXY}=PQ%D`>0Hux}XNLO(>%~(qAfiZmZ%-Zl#g&ldKro z&0X=raGe4g#UWRe&ySa{6u@(EFHA-~1Pgk}Y1zdNb}Z6-B8b=kQm=GT-EPokME0g# z)W-vW_xf~Gyi?v!@E49*px(8=1k~jM*mVrLYZwVpzG9+9t43zqjmg3X97BP^w0iLc zB5!-8Xdc=&vl-xB!R^~QUu7s>PItekIM67F<5{TXEw%u@^i{v+5ZIu3&Eb^c#p#9z2jW9wgwnE*(za+x;#}w3lmqd> zx;gn@69|zyAf)6iyz)w`vhsUpw<;{M+rf2=!YC+E50>>Lq@oaR2yo7{4iv(Y4px0K zUjqetYf7%;*$gbEjlMem9pH2ild`PlQA^n zX5v=lvrRsoI@f#Yn&-P6TIn9xcf=$!8>dMacM0feFy)kePacf}#Y2Y6cBXy0(!@e; z&w*PCMv|0zI6KY_`i?Gk&L3g#r^5r*9(BEt!2PJ0g~@JC*4W#NO{)`dShQcTP>LSC zf$>`#ujehUMRV~)J(&j2#lh8>Hf<+L)m!kn?GL9)mmZ+Yxi-gU2%R|NMRPVi%CP|3 zlwmJsfRVTW+_W{px#Z^-xX_iX25|kImY$tHf~aeT-nQT23G;@Oi`ath4?#EN_9NeP zFhON5-;npCD~ih$L%VNPOG;@?AGSt(&W6?$Z=t_62bvxgB6N8{hhLY|6@e1rl+Y_H zV#CY5h_B}$eC%T%v6D)9zw1_VgWs;r3U5>U1q4u(k;fEo`Nm?>+_(1y+_%Z9d}onf z`f4&_fSB=0v;vweh~x5(`*-!l3d;WtUj?ESnkizA)s{+#Kl{v>^%&;t$3G^~cbKLo zL_MkOxJxMk0;(DdSnk~;>aObUCd(F!#mB6zj(<)zf=%;@cdTljZ6UBuxA+}o`ps?E z1svRi6vatG^;X@7WB1nBxa5nC>Qao1>)opx^+M@{>#TYVz)hUDd0MK}@!l4Qcq#5@ zO8^Y(y$O3RB{rVSdf4FiAY9hQoPQu?tn;fx9!{+_+-~R~P|gDmBUUd)F4w~D6hRp%R2HonB?8wtLoM_+(-`H;bi^}wau{KeTGFx<5A?iE9Vol97Y<7CD`5f$H zu=)aE?UH*>tql*PvN?^_NG}GFsS^5NY~T27uUs8a$E$aoyokGg@7-j)713Kj{@u`e z8YWd@STdV>&{)?#bxO^q6UkQGFoEpSzW}sP(S#_S5)(%Z1=$U~_0x7X+nHz|wV!7P z*G6x7+0sf|w3X0$;a-a;95L-rIaU%#(eIaeNFv-0OC}!U3or&iqIEp6Ny#Ir7k)RT 
zN-N!0%gqpD;lHN*{*pfn)3O)z^(K7zix3Kxq1KCZAQkGTlG8yl$xy~E8aO^9$iDwe3GhP#%o4Q>DM$Ps+XQ&b78_O=(e$R@ zYr-9d**F(FH{H6!Ad*FX$==*LizmhlkQVF}vhdDyp4A0@#nTugzUKCHC!@T&XRbAL zbEDzcv@USo5mC32z(ifrzm#=z0w6|6TG;ge*g|YwDFyG`C+DQ*m0kdVI@n`<vY|FbCi;59!oEaM~=HKDZ09rR~Uvmu1S!;&NcPH6IhmvAQP71Rd;4WBy3GP zRnihPzMd#qDE}oKMzt=@(;jO>6t%_zY0j^$?dVAqtAIk_ z%JB)B2mQBrD>@hij9|w7cRBgRrk-RM<$x1KRFY|Yw#6invOWaHu~E;3JGwFv=y-N}RU$?+eey>v7FBTMOS58C<~5W=Kb1{2IQ9 zet#a~Jze|abD=RpT!o7lH~+&H5LvK)Rp_KYdbK<9*7)B@qsW+}tqa<4WvfWr^edONn2fFq(j1Yp<4= zdFni8T;0@;vvAhM9GSFy-!pLswuwn=BSKG$8}?l7bCY4t_5JrkYIK*2F2}jsC4Ih{I+ngiOG3 zrr{oBL|v*hk|Zhh0y(mAHZiXmN)0>Pg_BZWsVM2gRlVH(F5Sdea^8OpH?Q~fm$XRW z0TF;ug%z6a<4|WF^7mQ!32!>x%3uHvxj@;VJ=V#4C-Z4`7oR}N!6p5Gd~2WRL)8;s zGu)N<&-?OX=iNAkusFve=EPf^555InP~y8-vt6^|s#X^(eG=MBqLVTicyf>F5 zo+chXKsRKK0c4T5x1Rk%?Q}`-$#G;Z_cJYJhKX`nx8u1}bDfgX_d3d8%m{d%gF8Cw zDYs=`+FhF$p%ftktBtA`Oza^KCB_~*M1%tj@Rmv8eQVJz%{D*`D!Yz!qc??b(prS= zS0oe=pc`n0$Bnp+Z&@v~f#jazrgGftWGe|xk(uE%0KXhHf5T^==7PDKWMiz#k)iLk0g;O3O> z{?j`P3S)m;tS{IiO#-?bABm{9_bVB)@SBiES(Y-4B&TUegkNS{8C2*7blM-*Ra%g& zcU405(1qh}+%HF>p$V#$Gg&CvktXZ(923^NjDY$CoyDHeTy3QT=-N zM6D7Nj_HOu$OFbZZ?fIAHhI-1(Qqna0G4tN8R@L{?9vn(RX;TW4c)em5Z}v~8mfTG zsOBTTc@p0Ax1}wnq+c0av|Am^OpW0%Y@cyJ%2~{arBjs#{pucH+`Z}q^a1og^M^#+i zSG(c-cf`!WM2o~%j;8XhGr)5B=gon^K{^@FLoRDn*~Mm@j=yjv0;-d2`>Fi*Hy^F~ zK}#YxF&Keal-DddmER|;%qJn;O+i4Y6wm%V==E~gj8f7_i7C^8C~##V*-a3dATH_c zg__3%!fQLP7=QWK-H&(Ij6B&0`Q);8;hmEkO62|_$!7$X60_5W!W2ItX{}`>7oaYN z;$B%{6kj!MD>g=)3ka=q!FeL|b6r!Pe5%|THC0-u8a*Xu_Z!Y1wpy_{4UoQtUZ2+P zC9JxWSd6R=YuH<(#|s8BN%X_?t8a|-SvECUs(5lPqNw=%#Myu57FdO zpDV1$nyK@U4c#DC{0ZTGCz{d;>F$VlOr*Qw2q;N}yC+6~y1o>Ue!sF+M;ZvvguGV# zWQ8h|;=@?<-3n{Ui^pPaOAc(2#u<_qksjuMOWn$tg6;zQS!YXf9FQ5_>}g1Y1b&}w zRCSg8vFWbeEh^hGXq_W~28uQxw_Z0?Z1g=YbP(1ZDJa3jauJ7y6Q_3yNR#j^14phV znw@gMAtgj@Ar5SFj_bdF&cbS);03i?*}EOS)Xw= zCDohU^%48pBZGtCkkWD==s5P5xc<^7=ZL>yh!0dSs{kJ)?&GwZFiP|N@r*4&CrjS^ z*dC>kDDIcz3zUrYlMNi>8&)~2Zcy@>&R9g8tG%Pw0Hg)FBT;kQe(l`> z)cxMO2c-EX8tLLSO1&uxz08?G5V}|OKk_-3#8PxSV>#Gx9IQ{+O@=t1I 
z?;$=5A4%@&r)ZA!2w)#A6~CQgLW`EUP&Uu zMQ*C~i}oNpAdmnSlgz;keNQ>`Cj;9uL`O%rq7)f_9ZLXhN&rMZe~^jJtlx5vdy^5# z!xHVTA-FX0gjaqIA1_rB?D0y)8!d|?FKJmQT=p)IL{d4yKuwcO2iY~huJ1DW?-WCN zIt`gMBcF-ZDkl!^Eq)I)CwUj7EgNhu-9)!>C9ry*ny;00mS=nMeO6d9AL^TVTKE9; zJeYEZ^fckCm6O!$=s+qCTLb9c_A0rQMzNv5HkNT>U z!T#Cptt};DS0dsupdimQ%=XYBO;7N9h1xzgI z2fmCkO)!jvM3VmRp%wT`5QR@l&C->QTacJjAyDMGIFsH2?naGNflHsf_Q!-wd2Jhm zt41{fgNTAgltSMVEBP2l`_nrUb$wwN$=#2z?YC>&abDP<8xE#+bmY46$%L8mU^1_D zm(hR4KT+JVT+{_8wtC1vXl_qA1uKa)4Zbs>1dh|pJtXsJqJ4O?!O`N_M|!wOuk6zv zroV-IaLRNv5uEZmdTV7`$x0O`{WCOiLwPw3ow2&zbpBlIE1%Xloy)212IXM6?7sT= zokq11%P(9K3LO-i(s6z)i}K&j|CzA)i#_tIXy6CyFyiXn226iuYx#?LIej3Kzro{n zbCqRQ*cM44G{kJNn*v-YzyjPQ6b(d|fk`v894(%X;)00_&A-2+W0wD#^xfjOkMl96 z&w-4gZLo@taaG#W2!>b({R#zC)IDXx8#NPB1ol}+w1O_*12C_&*Bhq}Ye>bdKLYtB z1)!!iOA8hCNvi5S#pmf7=k&r^vze~-5yNc@RFte@+yhRa)}4HUeiQPJQOH#W00MXD zyO&k=E+8liMhcC3cUD%%Vlmd$Hfd5-t5iYSzb!VGaw0h#FY^McNF`>ZE)b+Iuj={h zaqPd_we6E+RxpH=Z;G3S{=M+uUGD!uU~P?n6&iECYxjErA5h(93GA|G#5phjvH5=f zGjNO+s4qHolp+5UFZdHX5P$7#zVV0RRnXN_z|TEz`jE@~w?6$J%Cck(kZ_c}mGI+F zPkn9*u#GoruJQd6RsVkx_M38`Cl3fTMgR2F4Kkp_>YV@6>%Za7{|wVJ8lWeZX7B#y zum98KH-5knl+DTwUH#ie`ak(SP$$rnPgj(8|Mb+dpCRbO$v1R=niv^<5HzL>4h-xY z^FZKMGt?JZCCK7h6;LNC#j0~~r7la~f>k3+{mN)k{?`H^RXe%xy(L(q?8)&tB8QOpb?%F-+2xxTT+?JradnF_!ApMe845n+hoPT3DeOp3%;uyxcRA%vW^?QFI8jXt@7 zGL{U}E2oY7Z?m$pjTvSDuH50+4*pGOpGL8jcv*Nokmi84wJEN!)brlGd#QV% z@e}f72yn$(6ra6Lo=(;CjECvvj&^*ox$7NkG1?0(DS~m6b1X)UeXJT9;hpQwm}sDO zS7;J*F?7j2``y9b3w6CW3C)G$`>Ewi-NrJJ-2-OK2KaNnOI#=Rb#5n#ON=U&AbmL) zts;aL-ZBp{zo<9nw(O-Ts1Bt_KT2c(55trGvz1aMyGc;@RnnG%PbRm~gXB@ix33bg znfeMTk0eSa5;KeYV6$)L4y@`EP0Yz4O7!a5Nh7HxDsm6%>k48xG&Lv^GF^alxX{Os z_XzK>^p|^6>6`m-+1U*T%^+zAakAQ=l)keiTGIzj$_WnVz5r02&m94$UGBJ-W)N0? 
z=c*yT#j;~;6aRT~j)rC5x4Vw(F){dc78mQ*N%a+YK$$dGV8TVrM7<+oMZ(LV$|v;< zVO?^AJ#rSAX!(=oU)tF%Wd*09HH8p;a~A_bPlr|}*WTnd*Lm2ZM4UyfiLCb*Pz(d* ztXBfxUv^&|dNxs0*LLu!hgrz&zGY>?D-nwLSN0m5e+5S0-`VpD0~8Q~@%rK=bSZ3g zYXDu1|H*|tmj&enLtx5@bAsB1>OTn@&P8gcZq=2Jeq|LAL_jm{N4g&5x;5)t$9lx+ zOM;T{0Tlb*YB%PC*QA8(mVD^W7QH^TyeWdg4_~8?)5RH% zVl}s@@U2`Gt9k0W*v%?ns#=Ryem5Wen_ z+I{Xil=M`s#H(k5Tq9Q<&y6eYBcO$Hc`R=0vcrT!eN}SxV7=2sgUVhqm+ypCJTF2W zfSZfG4}-hKycfP*(|7DOhdYO!uP6_hBJC{e`YS77g9;aL8^ zfd?yomG;;sZr_EQ1-UfwydQZ!pr*XTAcs>S<>vENUF~TN)3e4UwiiK7-LEE;SPc!%PHpp1*zFGIm!O zxIFNN*yqv}4*^G4sd`GHvM$m?J0X=+WIX*dnoZtD!HhUR%ZKXKm9Xy@vFlm)A~n^ak!|bci;{U{{0H z29-?KOBRo3yP(Qyrj8cp%2QSArgc9CtC!M*)uynmvLE^c_IY$J>!rT;eO=o%0#%XV ziYw8x@bhPXj1BFrf@3>EKG0=(s}bhEx)4n`n!GVyYFn|vlOAZD07vQ+YqjL>>o0Z@ z3b&2tr0-W)aYcHQ(R5WedFf4d#c-N4%#Mkcf|8HDkq%gKxKIW9((x;T6u zc;3Z^KEmlUM!zV1U%13@T9{AzqA_C;FpU#u>;_x_xun{kIF`d$(Ou6oga0qMxX<}4 z?x^C}Z;NSP-#}To8hif9=@hasQx!3+%O&A9-SPFy-joNzvU{qh+-lMvU7(w#fogCy z`O1^!AM!+l7j^<`>!0uFoxDTu$Mr;(?(=zYr<9)V-XTbwl$Uay3CRr}p5iUVG(Dmb>+^Y?JIegIPY0g=QP9 zx{`!3ZTy<)w)gvn7DXWIy#yN7dM0dYlftXm*GV-OrnkhbqZg~@XQf@O=8QCUyBl0( zgSn;Fn{bhdJf1@uLRO8=Ha=_lN#)+7c(-%)0W3v zvjQ%~-Mq`r>IJn!50VcnayD+YKtm_-%4x`zL;|^Sd-^{C&~p`#RWPJWx3GHYsYa>K z{YOB+r$Dy=(s;EbDiz@wM(o;|U>e1fbP&HKFnivX7?0w73N29Km8#8;W&5TwUE&c$ za_E144>rhzF48{VYkY~~P84@bLrx}*%IJnCaqVgZ|to z9-*x!xNbj4CQBLP(@6h#!kDN430FzCm?rSTG_r1(y&m;EI**XB+>dh{p$(6^*BgUp z>N~GVNuEEZ5KoeHUa`bpDj-y1mov$iyFX-s&AHWHVY92T+z4QwkcJ7Y^u9~zkl9oOjA7mjyBHNa1y<&Qb>4b2WjnmO$n6)2f0ZV#tw|jRhOTu}BB&Cwu z^oNM)Ftgbuv%WOHy$v!y%MwzD`e@=1%)b3l!mg`Q-+K1D{hYLJ}+L9e4_PIqmj+URk(!x*)|(y0Nedw|9Eh}opW#{=pjRnjQ|Rh< zRJUENry5QJriNkFg?bf}$Z>--^9v?^^AAJ0Ml#}v&zTE7tEVK1FUYFDy~6oiqwbDo zX=0yv_!$fjzcPRyo|STx3fVatUh>Lp)u|Kq8n&0{RBPG<5V{4Qipjdn^6c?tipbTn`z z;$!c@-9}sYcr`ZIISXul{3Is8yXr&4^IiqFTAsS$cmj0t3(;l|vnhQoqe4^jfz6)U zM08JTB}p)BqVz==ChW7*sF}s#Mq^Xk%3*ewt`HtKxuPAzv|Cf&WJXIP9E+ls*}Lv>6>jT9%?x+ElXkaSay z#dwLMvu6(=DjrS5-ztlJ8fL;g2h-G;lD-d$>t5=a5!mZ^A&$qLss{J$SsOXt6#d+O 
zVdq0#RH2(~7khccfAg(5SS;PoR0q30y?^}O*)NFvv2hl_RdyX*_ul>-IAYZU7ZaJ@P8XQj0y{VEmr;Yzn{*GnOvC&$&yUc| zI+TyS#!-L-Rdip&8Wo7mIR90OWp696y+pdmx~&5kK$^5j{?n->h3%4M6W=bvAD+S; zwY1y@Ft1znXG@6l)JuttrUzn$NaHUGA5X3v4+>5?W?~C)V!TUm(h(*@{9)UrLT_=r zazM`0aGM-K@|p2-pgsBRQ}Fp-NsFNuNLoUoiem$+x4k{ueL5|nzHX{hijhY$VR%M> z<>dEi`z|8f$7z%L;H+y~PwdnA`t`i^E0}%FKJNwjcL#jiUgY&-3hTM)5_WcAa`Esp z^Ax0=ap<}CQsaGSa(p_{z$UAk$H&xZy54wZ8(Y`smn}hsS*|@@uDECzY#v6*pSbD4 zLI?Z!RMrY%dZGw9>Sggk(30A`f5jmJ2kkkN`o;i4_r0mlftQ|&<%|}HE#67xhHhIe zSUY8vFyCFY4bAF!%_&a>+UHUtP*Y1}_PAJ`UAQ~A`V|Gx`nDRBbhct9#na5i1vH?N zb=dK)*^BBCu%wkU<-y5d=~r8P$d<3OUxi*JTt^N~wq!7k$L%E~p)w9CIjfcNLAw1w zyn!}x+Bd)GU#PlBmPIn1xcb-6+`V5M+i1b-KqHJfe&8`Am0^_VI4NmKJgHOv?d>dm z@Zt++EM;UJw(LD{DsQWoR6b-un?fqFA@|RTOklT$+?}SAM!3vQWM8fwjJrC z2Rm>6kRa!8*lQ>0rL7C!G*DcRQz^=f^BI#6@j@Y(u9W)htZ(yO83fVBU7Zvkt>0V{v@pN;*PEJJ}RYG5m{fgm~8&J&`9__3WsvJ zkb2AzlmjdC!V!%!7}=|rI5qQ3jG|9R=zAj3YpQ9hJj+Q*HCXip1z2+>;*k|<20&(6$bw@YzF%Cs=AJQQv_n1NpB60+vSFduN z(y{lu1AeXhYUSWa`DxQL3;!ygrWJm2nPaTXV89nJL1Q_^faJ_)%EL1Jf-UY1OJvP~ zQsj)`D(2GgI&!=LPRB2Q9+o|0>Zh{Jvf1+)8+v#X{k`KHJGDfZaG-RKxbKn~Cv4>0 zNP0uyQI2Pl^=rKe$9yU4s_NyA!OY}s-<>>a!GUtVOC_!GUsNP;dx&9swS9ZDl^KK+ z#v8U0y}yCo&R}jWG|xFSk`N};NN3MaS4iU3!*q_T2}*6?1vi#eufu8%YJ2l4Dt4QQ zL>{4shE8AGm$=WIuI-I4OBaEvO<*vz$%N`wOyUWmYrxXc`uxk}=Ih?)rk`PHlItJO zt_&QGG7kU|D)XSSJXmGF7M6qrdMMP!v7Hql! 
zJp3#)$ev3~MDKI@GbZ1&I-UlNV4?}bQAT{Y1iJC&UMkhj*|(uZSj4miEyIXC%WQLyFdr>hoO;JQ^{_8lQbGdbE8WOw{(5DrOtnZV z6S1{o>(@9I$mRE>g!nd}N(!%baV>k5mzkWzeo@fbMOc=%+?Eo?e;r8`XidI@d4!Fn z2&;-lEYEMll%wf}*(MR64u!`LYOB$h`z|G|sQ-t(HxGyMZ~K5tmL%zy;)g74R1C!+ zJ1zF560(k+!60j6mrA7&S;n4atYe?Cj4`Q{eH#qJP}vPLStrJrceo!R$1|a)d7i4Unu5?{+;-$p+gga;pS*0d z_)(N=%Pnc&Yp5T}ZM+N=daeJuF<5S0PVw$mzoC*(ysh*M+}rm>V?7w^`Ox{*5S#NW z0fn9Q@9p_&B|tDw>AZZLL7Y`71U_N6@9-RlHozm(MqIWG42bJbT&9?IEcYR+!71Ae zJ}LTV)e+x>Vd>C(TZAx!?nXULrK(>u_fzvI_Y(qtVL;v2q$ zyId}XAR&4syUyTdAoT)eyB`X9BOmU0P3kW#;WJlVcx`BbyV}JfTb?iQR9^X~@TBD9U08|@VL0o){RVIFv}HV-D(Snv$S^EpSu zkfB@-l3ppq{s72=#(<>>;zX=B{W-Rkq@c1`(jB;*g^)Rzd~DU9qXU@CF1gv=kgbsM z_6GSR*c1^HTJi*iJ(f8jb2w;d2b<5t!Ayn1_6#e~8I%8IovhVzVrzgv9x*3DEhlNkGJd zqa(m}VZC_^P6`~<3-_jcWfco3IL3o}mzIr6pW-LalGpIzc7IVD7tk^^zvu0KGx zcuztujFdfLTrdqS8CMhCireE&m|A7R-L4153v;79hN$z`c<=ca>!0e*gy!9ixhl9a z^i?%u^WX)+XM=@mQvr6U+ok(W$ii}1-N1g!aLck^!_Ldv36O?Hz{RTUvv(rVOuzTW z62G;~L0L8Fq}($h2<3YN%;@$Vf+h3jUz zZML?2I*O}wd79v-YG7$Y%zbKN;uJ~* zr}rP=J_&N$Ph29eD){U_tgM!3ud_ckfvH+wD=l9n)12??p`rDR_gf(An2@&)?XGq37q~ijVFvF@awL}D*elH z9RLki+C+q(KD>j>Pm-}vAZ@ji!Wgg2*3x*Cn8t@6`W=bW~~(g=PNb0gVOj11_fV(42n-}~&7hbzt7oYgc z=I|*oNw0ZlF8N4H)^e>oOe4G|oVC_Ke7wxKr7(MP=!6f6x%;9tOU|*xDbI zS=|QZ(7Oj@?yXJ$gB3{5ZP7^)oI^6B66cy?ZJMz*;43}HG6Jt^{iU+x+#IC*@Oz^C zo#sl<#Ipme-1Hg;5RpBJZJMQ8bY8WX$qh@kE}`cTW$Nwf zLapv@83>@N<@?PH8Qk_&^4EIL(d-|Rm6+)W$Vf}6iT#HuyPE6J1p`c<+KlmT(S)&b z6K4C~nhC-R_}bd&T}>M_kR_FpJB_JS<6A8R-CvQRZY$8txL53>wl{aF)59W{wQ*7!V*wyR z8s*KIJ5PrU(t^E&uEZUKtOm3{|I%kDXEgmcieUHOIK18%SHRt*Ld? 
zQ4~q|n!8VNN;&G8s{QRHS_~~`DEitXw$VUQi6*NUk36ZN6C`{=E1vSzIuiqDn1d zZ$Um;pG(V@XqXO5p{TtN)#nx(4t521tI!xx!P)oKO{VvsWU-&%2OMijkdJ~E3CwCU zf`)^NE_Pfw|~K@F{C^=B`Xp^vRv>voX0GLNVW-wy`jC+Wl^j>^#lKjk1wz z%nV=7^e{${63Aae)g3Z+PV}8mXqfwUDEQ4^AGQ;pn;c7x?He2K+cuK_8c9kG4DT-n zTmm0l&iopZ2WWVDy1KyAcriAXLX>v*qt#Jm=veeS*0j-#o7;1jK9liw8TkJ7iOq++ ztO04fR4>ncc+ z5FCHHAsJX^x~uWh_6#3>lq_kE^JVhYr|}`BN~0Hlp2U2vH~XKH#N;b!Wq7VgFJtUO?${{eoAsWYij8hsjT!#@m-G>L` zR+~mQYlL6*Lr*)IVx-@`=%vEDk)h@;eB6faSHv^&0Rr_;x&)J>Y&8yeDwPjFraV&R!j^K{xx^EvJPm zJMNruz3Ir^^K|#onc>#_Y|vn6LoH)oJ5>F3B_mDX)ljN9cin8-QX6)&*B|+GXiFa2 zf(rBF&&;#4Htw}<(o4&lLSEPvx}^$75o)+D6o*;fx$ zh2JQaEM?!R-@^o8mRKfa$I6D}ZsJGo+fC$g#QV8g9<03>{CW8&ji(&BM~=KZcMUoH zY5(hlK2~@{e0?fk^y5lw;sf#He}VV&wuJPt#364(s``(@V7ykYjBV?6c71!_QrA}* zTKKA$u+<6VHXDb0Q$tYskmJT{FxORj9x-^quJ2ilU}4R{kI$10tkIhk%Cyv!E#9|{ zW#Y&uOsjf45)57bf-r5#@w{?_V$y4xA0zUHDijv%``J1;Sdot+yLz^yGa0#9gJnE; zvr!qm6PCnRQi`Gxc1|!l=b0}@YB;y zIPP&<;KOSkJrAeF*faPS+AXI2%m-7=4;g-cn2QP8G7wpvQXh6|NLI-;E7=NcMQsmw z4^3&pQGXBesVZE1OY-1q)zz->=cox(@fnoHpvF{9c2!fi@N{Uu-a?AmB-K!ADy_~? zvh?%4RX4TYIf17Eb0YAkUBAZ>S-NhJxqPd^uY z!wH)6dXGMgb--q(=3?VTB;t&!ji>!*%p4(hIkcgi^7-}Mz~O5f?cKS37aM~j%#2Lb z*DgxPg}@M`)X^4K&LygSfYBY8?~OQZ=u5lgPJe9#eovN+MkB)yqg{0^;&Ve>%1BR@&YrEmGbTX>8p%E@6Ab#S z>gf5N@Lq*Tqf4AXkM0&<05a=U{+?O3QGu!%c0~SS(YeVcbZfK40NF6Zpl|-o5%(&J zT~k&9(rhl$8)H7jSmc7qB7o@?fw+Twn>uJI@Kmn5b$b(KN7ni(>@*Pdh8j^lVKI6S zq=5&8Ki{Lof_nR9`aC;CJdxlX>MLmPVnXP<18CLB;nkDgY7|-Frha1hXK6~UlLS`G z65p4-l3$IheH6D;>23VvNs+C;lP#k<leu)9Ej#jTjdY(3Co zDEyudZas&llN&L^FNR~?!Gvxl2Ep4w%#iwQG7;J7$u znvPOnhI=Ktx2g~zO_43|nu?^p`fIhn(%FA<(Z5$~fu84t$1T;acFt`*_`tpukd$~! 
z!*@v+wOg{2&HIsvn`Qt#?qBLo%sO4lX$9S_VPQK*Kh2# zhHg(ZRhdfJB`7HsA%ziV4!(TgeG?O2$ys zp!sxhCf{mtR@h4^ppv3nUzS^II_&F9^$Hp;4{u9Vo1a~+hf1zK*xe+b*)3R--wc2F zsX|HiYyYKIhM9K8pgwYiyNHHIsp0p2)Z*LJ7=Yg#r8TTo6^&5YC zVTZk&5X?CCD$5M$-^P}Zu*DSxR(u~rLniGD3+i*=F4R8JI~zGdO5W2Bkn&`7Hn+f* zhCDC}h>NR_1Xscq0Fu353Ba4>P4*Mx&{G&xkjANOGMF9ochH`3-{HKs+wzc3_Kls& z`V`xet@NJ(e&=6kUT5p9H+TZzfU$NG=ojw|dc#n0_>#ey?DNZSN0A#s%})jK(&}Vs z-JtB7IUW{Lbo)HfMG3n62%uj5l_krI%+qbzY)_K#PL0tw>sbiML8gSgJH7x;D2+a* za-^JHk=#6^jFoX-2}#)z5%To6UT}x+w#Nc&jby`wk@91hxnqwewr1o&u()O55XvGT~YoF{cS_?vH;f(97(Gh02 zWuhUAOK?X5ZrtOG+nXJZ&eRV*x4&daT^mja-RuNuzd~j{$zJB@KtZgONba)V+70wI z4nI^J_6{g8956;Jd@5U1@)#nn^YzM)APscfHkc|U%`a+m?Ywz|tqd}=D;Vwx<0GPE z*Te21Oa1-s?3XPD4txMp`2f4z(CPy)Y5(A8pk7IUdC!Z8XTw@T>Iwh?@Y*+k^73-4 z_~%pr(|H*&R=e@u2$@+40EKPb=%59)zc4etVvZjPl^VX79t1LD-c%+v_O21?EB93L zju6g%8%~a2>&;bji>5rVd(hG*14Y2IvossOTkbJ>RSeo$Vk7 zuZjXS;S|6Iztr}sU*^YcpI1JgCqx$+SCqdk@q_4J>N~ci@oiV*Rd%mTdqFO^#-UCN zc{ZCyXslbPbSsY7XR;TsrYY?z8iQm|?hSXXH^YJVVL+wtbYi8&|LE_8kWV4X?#04k z=+jiZhAb`j3nJ4s-Q1|5RpXZSORI}B3zhPb8wTWp@oqYUW8Z=@L?0Jk$nZ{&?7g^? 
z+tFt*vXqxoq%Nf3^|~R_`sAB$Yd)?!y`r-MS8+ZL&m@@4=uA+z*S{ij&m~^bss%^( zug@U|@@`l{EjC53X4+daN*m5OrUshv0MA_+U!OGac^0p`2-xM>buz)6uflG=FB8}}GX_(($iL+i;c*-jrw&5jlDjJMQA=op<; zf7y`uIbrjGR8KCUdP1E~SQlk{gT zv^KZC#nPHwV}NMxJO zDM;TLy$_?&%@StX55EtqK z-&O;ytfRhc`UdbFniMtEW@#rIqx?|EU>N@L0HVnLG~+2#!UpY}9Z0Ca?;4M8$||$FCFd zK`NB`vy`0_2QmU=*-xZQkfUA`jTt--$?f=6T%+MAf|O z`ze4dvp{ZK_zOC>6`XyOPJZ3A+T7%N(v7xXQPnA4X zMwL?--T@@neCPZv9j&3lIjabHMCT;q_EUnLPE)2zaw(LAK;_N;;5m%JoRqC@QI?ZA1TT%( zWztGF-fG7@3yDA-9V^;Al$Xt44mL|qEhg8r98;lAb#h)`3im7Cd55M#B+TDu6JKj&*Qh6D%#RPH>Y+%_*&j@bDt8ka zr14S9f#QRhh;%$p&u%ZnDjDy=gic6XS&_2DgdjG~bWS-b=Z|+voEGS;oM|<7!m-^0 zeGpt3_4H#z=BCoO$B8yoeYKxvb2C)ue4KXDs_f7_E0j>1(L;gFQvnT_jT{|eiQWX@ zet%~zo_y$*PL_>(d}%?qu_E;`AXaoaw{d$lSYwS%l05@oipm+u2V6d;@-UJ2@JI-D zk1%0;)(b64xpzop_Y4yW=vYv*I>M%@Yxc?>F72EyUSqBCT-F`GRa8w0mS}Mc{`|X$ z-{_LWHR?0=w)zWe!oeC+vHPBpV5Uz)?(#Cl^}LJF!%_PgZ@5bWK=2Csdkb#%iN%y3 z`D$-_Vbx%cTC@HtVceLyP%|U`!_Rwp`}MJF1)ob^;tSYiy|E+*rM*P0p`pEO+$cN* z>waJ$RnA1EO+5gf5}vbdpHbD@>MH+^_U=P}hxZ{G3<@z4dsqaa>FB~~{!K7||E%HIA-^|Ww z1q48@cjI{^EwagP@HO$g9A;K!Gro8EnwBW!RMA`yM?{J(ZAA<-q4?$$X~TJ?-fX_L z$3Jz=;B-Tp%=8UoIpZL)n7W=B*Io|;9w!Cpw&2DSi|0il^586hOuBT%+$E;Gm{Ss; z_zAaPdN%R6maH(*aKml<1Xw+f*rTDVpu0^IO`dy+i{~K-w~_J2{a2b~n5F8zuvP*O zd>Kclf8mZa!0uL z@%b#d#e>mS%e7NE(y*y5$z9bc_*|)vRMm$F8rj>=;uXIUs#0T@d%HM?VJ+YBmG%z& z=u(Z;?rKX=@t#Ylh=#C(VPwlK>?`cU4%pQAUut-sQ+PY*+eyZNT;Fb_amMA{82~}; zyGLD)irdTJ?6QamvZaK|p})sYLkMM(_Xowy>JPeL`k3Fmeh@+!pPR`##@MW}ZpsVe8DcCLMS8K5S7)!hF?Q=Ap81OVwipcDBh#nf1pAoF*?Gfrg_pi->1pX%$A^rt_D;`&mVus-?@;>RVSci?zO*nkWob(z{4D(##&T_Y%BuYcLj-g*=yE;2xqgaIW zVs6DOO62;!v3a<^6akQ%B&9U$lf&ZEtvaSRmN8P&*qnSy)kWdz+wDV{EwdvDnhZX9j z6N>y}FK*voGvmmp?6lw(IrZgBx!*U+8{#JED?eMuW9DhY({h zPXp`)iB(jFERiBmd|EPeRVlA0xx52oJ%w#j_KgRe6fC$BC_k_ z4qQkQ`8xg0eS4|A5PE!RpE$8N^U}%%#IqGKp1!p@IE-%E2&33qb!Fly0K(m>ilo=` zk!md@i&;Vl_0;mtJ3Ez*#ZE>CTmP%HSZtQSNJ4y=@>G4)B-#huin{&TLH*|)@gj@7 zq5AWrkB6S$Ex3liv2huEA_!s$FM10;RWL0=()gNk2bkV4Im_K8%NPa+Gff2PUKg1B 
zb1iNza3Jg)ArPDmP8l3~88ikDjMGO)KFTg{eH<7dC;aucwIp+|hKA35|KL*%DS#F0 zrCC&30hC`jnVZ9td2bhwAl$o0lJI*Wr5083J9&8JW$*s4eU|7$2wCGy#RVl>(@c4Y zV{`d1SA$+tDmbkmE|$3HRo<4?Eu+#8ttbfW%WUWXM;B18W_0T$JL*_{Gh_H}=`pWx zfy`9TSBv!gHa_lp?TrWoU{GIM7!ZKMOMh>(qCfRN+#p537odEHZ8EKZ?jC5mC)fE^ zzAP9FOiqErX(j=nqh{Msk8mX0_9Hd}LuwJrLd2-19T0z=`WKU$+wX@Rqgh$6`jCSMEUOQs2ZMHlO&f6dXDFr_4wWL4ILo{U z`qQn8DKmja_UO0J$XDAD1L3LEDM0_U4be3j{I)FmN13#cAD7Egvufv6oU$c&sQC^G zSeg0P9b~|Sr8ta{n(qi6ykekCxviMLdKY@3cL(*c{yiZJbe`L=9IXJ4b5WbI6DT^1 zSFj29TYoWI_LX-|24@@4otp1k&Tbp@(?r?F8)`sb=~ANPakwxm`4zr-mp1+KXlQFp zZ(icyn>Rd3w_@6x|K&40{P;-H1$}ynjw9#KCv!k4_dk+32RT0~>+(T29xX0+5}5D* zZ?@n+p6&@TlQl=?@7}rdzuBArgkg>DViv?Rx2#>^|3fBc9^r7JRt>@C^5vhUW0-dD zd=HfjS6UE6pa1*eXfYO~ryGsTPg#u8GI!(q$Cxf%()_mnre!4y_>X`7?}%<=*mNgB zm?}}^pAt)TH4pkFUHEeAt^PkI@SjPoonoe>f_&otZ{q#`{G1rY%sM^sJ^%KFKh5$7 zvv^Qkm7>l+k%0d_VP>C?oH-xfDfuOJ{a-7o8OF@KZO3O8{3%WNd>%8ombK;Lz5km} z{=aFkt;_*^BlQmc15xHbD}UlPGiR5@?qt~?%V&a^16tdfcmLBGa9(64&>oaIV)j$` z{o3@iD&~NGg!lu0`-PdB%=)tqQWXD_%YT~6M+@eF!Z1pLzwcO-Cd<##OYh>Yr~KOA z&-r|lWDclx>NNN7!_gEw_!B7kzuWoW?fmcQWG1!$|LJsspWiM9*QH&!E?;u!cb;(@ z!WI$)HiM9I4xcK%V0qStZTSz9N$hqJ3+G_cOI^2=sY9oKT7~|@Aphq{7kbbtsb~*y z;CHFUW?U)R(YntW2f27SnzCM0_ zM&y^F{p(NW_#Qc5isU-*r^#t1z6|{XkJc2MV=~46PKvn+|J|wo5Y>Ol@83uHzh~;- zZ1De8Mj-~6V>_UGDOx_0%Vjf*TNb(|erXD2q^wv4(4j)a1LdCc7Z1OB*ZC`M%a^}& z)5XSvBde0#_8Z1Z?GPFAv-G!^cEhs5qYTKCFxiMA|CK&K?cCdB^v5ERG_1SWjvcXd!y9iE`PX?(?{2rvKXTl8@X2b&sZD#S_6B4eYkdPuyizM2+2lImQ;c}s zRK8y5PErtX?{jD{=00nziVeFFt+cJYXqa|YS4`#fQR1drJkOw(|Z^A zyj_08KX=Wf(oo(uzztS9#k(r#)~PkGUXo@;P&T9_ufb1^mk5x;14mN*MHx+9(5V6! 
z$P&IX#|NhV!_6r=%*SsQV3#RlZPYN0a>d3pD<+{{u~faL+8kRN8DpLHfQ9-aUS?;W z3nz@8!kD;LbfjqTr=~Z^Nrwh|izz>Q$AiOns`>+{Btpl-QQqFq#qFm@k4jhHiO4|O z1;md&z*cf+yR=0tAz_Xy#efMfJKkGEpN5fDN+{~&BS`np$fExc=)ayy*qKEhE~q_*Ly$&L!kbsEPR&bjxdAooy;jO0z!EQaiB-#CcJNiZEI4 z5+UOl`Elv|5o)OaxdZO2-5poxtXJipEyf+!3 z?=~IuR-}dcSs%I>(`0PPxZ z_+w}#K*+g6UdE`Q+@SoEfI4YSd@PtKBR zFcbbCL7Qy}-RFs{P;6rjc9x&7o0G>V>AF8cNl; zmq>G7a()G<>av%9A4qaBz90uFKOVzzr7q3x{Cucx?ImZI`h7HY8X zMJPow{5w{ko+!Ik?=W^8wFVBrY=ifPk$&ygZNe+c307TIx(#Y-xPn*l~ZJ zTo+Yl5-U3F>o+66+@{z`m&Iq6XAU_xPaWYcRM~oty~!b+_bXe8(sYmZ;6HDTt<_WBGDah>~A`MnnxHNIqGyvB9ybS>?eUjZ1249)%2x&=(!7)av^@0Xlu z3x?BjjNVky@c!3!-i`Y8a%MX(kS`Dy>qXjLpt733jtI2g;udo05OxcA?Iyi+>GKMH zNHOy1(o}Hw=Z&!b7_~99!HuSc74LjNzt>>%qAFb}ut9jm3tP%fxW-ALdYvH#T@(o4 z=u23jA&M6^H`~?bWN|Nyt+mJ3cRPx<9o_*mJa^YVCV3|XN$?-_mhiE|S$L~p;)cs} zz1_O~r=|QvL?^0B{q4}3QWy)5qLJ^}ly;8#z|qhV)-`p1($(hkgA8Tw*bt&@TId^^Mm~_!K*qPsidk{!ZT{=1JX;AN#$t zrJ`va->djl`qR=U&C87pD&c_!hX6quHR=04Ws&^dX|da>Q;=n4w0a<3c-9Yhovdi0 zXc#)lW2k6gy}74>hUZfD?F*c5P-LkfNVxfuM z!*3nM9$+%^WY7k8r&8HdtUIWD1$6@zAiqQ0_iPqH0e2b+z`lU>kfp6*f1Ed*7mn~> z&Q}_3S)fh%6t$UglfwgCKp@+#-P2*ytmEmXLqn8}tBelrSY+__9KGmZRYN7gGU#Yh zTpxx!@;NvOhvGON2_yzpg%;IImWwbJCW5!-x>UOBkMD&B=`lm&-h=zDVvg||#>)lU z2QIcj!l2zD$Vy4@ysAaU=>WvbTgC9`rnSkfuUCH+zopV&MAu5#1<_RSH?HI0K>4dp zsOB1ke=36bO(pJuxC0u{+_!{IVeMFH;i6vZv@YtzBi--rbzR08ah|nQ?H7hzIqOt zpF@^z;TN8oVg{S8%71wn!>Ky8XQgLhuu2fRoENg5H|oOP5s<4x^@Q^lueh;$Gf{3K zPf$M7@YVycO5e$MuB~D)bx^LwiSwRcJLQ6^I@20@Slf?Lh&Qc<`x7%cf(J$}{YJ#c zqcxFSN!-hSjfErX^xV~5=C@N7l%>9Gug5M$xW`r2g@4ghF_1O1cU;UHdc5Asb~PeT zt1(5~-MRY!@5!}PV(=!#+%1i?BxP+UXpb9p<#_D=z_r1$_3Z#_=Stm}Vc^Mu+b*|^ zqg!M>V0%hF1yTxt9j|JM;oYenCY3Ss{4PaiRFB)H{D2 z3t8RsHC>(mn{-$1@tTl0$K)NVabiM*l{6P0 z%wsU;j+0+suq?9GTV216yXCn-=R>-G5iJ~g(yL1J<*!(q{zQH!^bEOC084!e?Veh8 zNsg%qO*~LOpNO;n2Jz`h+Ay*(#&G;$Tdnez&+5D-ovgbcY(27#ykhN;^!9V}EZ^?J z*m`^gIH7n4SMQQw6v04u%E+%oz2oz0634%3aQFR0JVo(M21fSxwOMD6LueV&C2JS0 zjV9+hP3CEj9%fiq1mmCj<#iq${V;o}TV_)HLryN=M%ih%^-f_(^Z9+CKW5MIkwY@S 
z*CrA^6a&`0>mT;2)s32W@S4vPFYjwAAf16t)Xhu^=L@KH>NsRZ4RTk)R@-LSG~gs& zhGBJ%*N}f^>C+AClyO<;rx2*e?1?6FVo(sD>2C_z_rvD{6TPDJM10T=eTtm>is`C$ z+_j!Nhf`ti-nUoYIVY-8m#OEQGiOZ>zb#Q0U~Rkogj3(fzL_yD8)B@NW`6~^%L#g$cPrw8mZD`~z#pF{Y{!Q|uIt&?WY$7im( z+Or08HR;HV>D3pd0j3|aOIB}4Dib}vZ-!B?o(mLd#Ly6S6PfB+=dhWB5=eJUL?*f| ztwVkCYA{f5RsNk@|6DPQ-_(UE9-ty{R3)czdgHGD}IcMHSk^jt&PXT?Hav+ zR#p6tB~uTtmoGT`R5Dxj9AEs82c=_e2jMXKq_=j-QL~T+8Kc6gK&XdvXGunLYrOeG z63zcQT{QvQ{dPhp9JKeqvc#xc9(2e7O$3?;p&cqtKcKQUttLZT>F(eCWq5)9 zY03ioT1He^suQ;q%75-OZFXTUACKeXhPQriXQAB#7N-l_jwse05t*coun73l{}ynr z-1&=YcpOm-KnGPwz$}Xva$T%s@BVg@#p*Tv8qPC|3{DXRe^@1hQlg+-jpMhbu)z9#KHFL!NKDao;7 z+6tisWgIrQ@>x9dBX!)5{#y8~M|hFYHjrpZoLgCdcVP_`JH+8}i!?0ayU^7kj2%?e zQ>@}i=K?CgQttqX_CRt@iud@656g9b!$hCS;Us3?k=)0*7mq_dt4JIT!IedbiSbX3 z*?M1cU9H@Etrv607$I^)ZfD=nn-%>0+Q7EJ0cdU0Q`+j}z2yvl#>gZo=%a;g#2dXY zCR+dZg2S}k4Y24DTW+G|T78Xq+*maY@|rS$I>{=Uqhl|{ANF&q5X`O{2SzI93|z8r zV!v3wGF4pTjF*OBOl2Ep8-ix`C{VS@I_K34fb55WINYF^yqhIvcPXNKHJNWqZuXT! zcvj#_iw3-s7h=Y&3I`_F>hv$VT5zfmN9v8sZHs~0WeeZ?HJ}HHM?S1NyW!?;>sZv^ z>eu02AT7SgxQX?~d|7b9%k*zfC8Mut>7iGr#jz95D5)LyalJ!!9JWEdR|nZ@gI4c@ z$Qli3B0|p0+9zs-z#IJLhG8G_=0yTBu&TA_oR`P#l}x5u!U1Q?wKK4`%Cw)ap&w4N zIo5f(kg*U_1zkRc-HDSOb+D^3(jA{!y|JYn^Jy^ic{$U8JvkAUT%_8>7 z{0(n|#{^GkiKrpVr(MD1LMPYG2=P+-PeuCJsy^ConH^v?oJy@+lH9UAcM*CT% zhabbeTT2`}j#&uc=gtU;W5419Ebz#Yt!@gwlcFBkGwII{8;xvxR2Kru@w@H0^W#N! 
zQBFA%E7CkY<{#DQ{$=Dy%wv&9!IJU<Kvs8zNc_>pg1$Klj$A|H388s{ZAlx87-s z6*(T%6zrhS*4ZM_@Bf>mVV;cTOUFrKmH|fTWyJ!>Kh%I{<{?!u{j}ApQh!2h)@5V* z6iR(llyv^ob~V$f{C>UqkGRXHn5AKg-kZ&z{u4^{d!`SUtNX+CKQ(&qXI+yc7BK%m zjpy)B|E?4d*Pj}_tHm^MUAA%Q`|ID<+kY5B{y#TKU4zZ!Nfab=c8*^!BI51IlaQ2e zj;|rRasQ_b$p4({^EH~UGUSWQ5rRoM<_5L0xbrHKMjpc@uRprkAd6G0wQ9e8gVa92 z@oA`ULjH{fFy07_^W7=m7XP~VD|Zk(9(fRQ{+=^|^VBKtl$I72OV6^~RmSk)^2|%j zO0|IJTc`9o#4Pty5w$_#l{iO38K|~4pi|c*d2`b$Db+k#=*(#FLE3Ne3(=2x2iR}k z64y6(f8@;ez8-eFPuy((22{+C;=>tebk`xT+A(qDFwI#tl z>{@ImEQPTRllhThq{=QMd&$3rd|m6sz8?j&W|j^t>+XYEwT zR7W~$*A7dkhSdD7*)q9L8{6_oudn09jBYi>2C*OdJ+Jh|&OCC?cqT1_ErjOHixvE4 zF=4tIAM*|Hxo{gA%K`*V2gi;SIkIs6b-wOkZ9wufW1Z^4KYqeLP`D2kH-Yg9S#x69 z3_ft4XO{a?tSYp$4-q_A>&qYK@*7D+MOllTQ|pE5C-xxjwti;zA$jFSx(BlwkL6C~ zS?k}zG&J>(jNN~6?3r$9y5@D(P894h-D=UpvL>!`^tZMXtymON9i1dg1Gj6`CLSM7 zTGBmgkMjg8KlQJ2f~uZx(|F1xuD z>U{_aEVRQ^JgzvG@`o6gZ-7J~^Gf{;U|VDEx4|_7xsQl1HuiXviM8|aF-#i;2+G-C zM>apY$M@STFW(RYKXO*kG_NvF1w!swFi$n-{^Ld`zl|f#a~Y?{z2M8ED6{0_p0a;f zAzkk1!VbOEx=Yhw;i7}JK?p&E)s>?x&JJYI*3tmHboi<%#0(yB@;Bnw6iZ=CI;)F1 z<5uU-jbszJ;1@k_w5%d&jqPY~Hu}TsO{~n<;d;ABcdkqNvn+%QW=!nGFoV%Qtay@v z*ri9#FC8sgGw)57TZ-BqlOWt*2|;vF>6Th89;{O>z6N+ zHZ@D_KPZXEGo{?eCca_TUxpN2cw(&nl8Lxc_NBT705{Z5P|BahRkFRoF3lKG z?wcy?r@|+8+>vI@b=l!d>X#1RLos6y5`2yOw3WV&&$_r;4bTUi{r%hnb}c zN5VZ7jhpSP{({%=i9NvT)SDwRm;w0565&Rf9qnm0OAVWeF+5CU_Lh6g!=*Ed8U!Z9 zOi)bSbM~(K$siIoyOf%yqUqYOR{7yKRjka){@WYNtE-Lz%VG7Txq8@!(RD`UTXSaM z?HseV#r)e$HMdGt`exkvMEbOv!Od1WIW+lWWW9~={etDPOM0m*xLFPZfl zHROKL2v6L$$H7&1Lgybfj(8>M%}*COS(jQjoR57td8{Wo$u5eVnq#0ExqlrLwNh&! 
z^Cp~1?6#bVpYTbtQw!Dg_Ra}g@8r)xbW!gea%pR}J0T{SEAcDO{|I7E0ro({y&`_6 zQs6!XLsfqPrDONjt#_8oDUPk6p!-cMb3RARnzqVnF$@y3wz$(LFQwg*4@KcLl;6mR zqH-X30)=e(@!oWO@N=$S?6FnJ;TJvOep7rd6DY{WoLF7=Bj-qHnwzYBaqx;fapnr48pEQQUj7X+qP4wAzJp=hRk0zVdm9 zk=7(~9+<<_LgOdHHnzT+Cp;V{25X3c!fj88c@E|`ZBnKczdes)gYwxR7w_Taox@(;_4y{pUr~O7rqb!ocBWb6tbg5BIVYXk{)lL5UjSp|#G#eguc+29#^v#eB@vjOOYa~yc z7gGIk9CqzNQq@Qr_Z!=Youa!{c0}E>FV=1|pN#za)p)vOu<_fyDtpy2w-U5U)tLga ztI?cJqu6{BJOx_m(}IWA0lKcg3b9-}SIhA$Q@bo(-MQKPqR_p&y65H5VwIvwAL}N~#w4|{L@74`Q00bdCv z1xXR4Ls~(kySuxTM!FkBQjlgC8bP|dh7gb#x;vGjyG9t|dHeZ(uJ?1V_xS^!wVt)+ z2iCCW{XS=(efECs*V$+9uYnxcvDl>UQG*LCUs7CBmVeq-X|Y#t?W{=3>DTLV;DoAo7?YcoO+O&byukTf#j_}J53*YU9$BaaLp!u?ur0K~j2DDJrA+ntuU@Bz_`)4#n zDe`(^qc6y;x$1rg&}fl$4B)L1T)WOK5JvbZb48r*`Z_Yg`r$bRxZivbQ}_L@8WiT;lB$)9iZsY{Kq0Knhd@SZa=Y0ByO) zv2U_{;7wxpiAC_U^LXh4%*3Ywoz@&S(`1{QdoFBe^5e?ySX2*sB4$229~4H+Fk8bb z2iW{z#mwvpfJLVI<}xXwJ5!$VZzla2lADb*5$%Gh$jCcYmL=J%CFnIK1_sTOHI8vp zaBqJ-24r*jLB;asRFv^&eRQvDK6A~ zU=bmHi;&r{sbIqnt+T2Ma&Yr2J#{6aYeLzwPcfKDsQJw{*dvjUY>zjY+N5v>SkIb9 zUz+Nr7(U}t**JJlVjt!`=t+nj+WP7m_2>3=>GQw2l56#L`|jo$jV}#2Kc}x7D*xCQ zXtZe}?aNF3Sn=Ig>F3GEEKdj)JXTu2d&xv=q9VCAlp?u4{99K{;P)ccXj^DtKOc_| z3)@>&IV75#_JrR>rhX@(WDoGiv;KjdT!ee{n`lYV|5t=g*^Tm2#eG)PM?>zXcOGs-kIkDkBwU%G z``4s;k#UTdua>Bk1+LyDySD-%R!%kVUSQ(!BRih#W|OUunp08aHSR7bPAb zfCLsFxiVMOIFczBTMRu>GiYIrmR5)s*X2C_E#$&wzK5N74)knncC}^3FBnYZJq1hE z?G8WHPuqvQh%nc9TWX*hn2Q!~4nzm`{@LprgfjbkA0tw82QM!5F)~~(c_Vkz0X6IATC-mFMDfldEm>%ay&7>nCF>GvO+27eOX+f$*85NO2DH_UP$rc!rS*>|Kd63~ za{be+{s*;6P^)mpgLEGJ-EjUNcn&lJfK}3JOwx=)m=j@DKlb{~%r$cq)gn;qbp+K%fS;@}n13|9Z890~3hRZ5Q0vzL_AN zr9gSP|LJ}KqV5$^n~q%6ld8El7>9IJ@6CXYs2ZOOt!yxiF_fPEE`o5SFFk5+r>JF! 
zG7MJdfrx%Up#nBpT3N|c89Mk+3-wcGXiTq78A+_^sz9HZUvN?f0y6YkP| z71#!m%y18DwH>J;JN}R5?6w~QSBU?iApfhCJIpFNjZ?Wr{Rwv6bWsriC4?81 zr(!}At0KxFn0Ix~K_}QceGzHIvOB#g2e4zboHT|UgTC0wkEo)Cx-Hp%8(VB6l70h` z{F^sb1j(zjN$?a#K$e6*;PF4xW0~06VXqC z!MaZ-=lRkbjZE|G$_h|NmnCe{N^yN>;Iz(5HNvESgvc8%qx?Ho2G&rPhrXs)}1r zP=5{}rO`Ha zX?57X!4aUfw%&Gzc8mL-dcG53!eB9KksuHF#@Mdzu-I6&=xVyX+wDN5{5R_u2%H!L z?%U~jI`&@LiRbw~1V}-6?#R9Dn&$gste0x$F^}WxKJ%is)gT-p3`)=MQIAldNE#CL zb_+z$8-V`Z_fEeF(x~nd4rlXCdfi-|i>~yd$A>_wS^n1@2TI`md<%6Tt&btSVk+4y zk^D{J*CKajme1gq`>pWIlVt?Om7VbGFMqUe3NSw6S^XkzHoeE|Pv#Kisd&N3N>~E4 zTda|~I>W|7QcmMg1-gx^Xij1HEbi!RRx*73`+*{*=ubzm6;f3CHtNRDVAK%4)D*MV zv`G&1rxzRROLtDAB%7IqlAzBk|6U{&qqad{X7hkg=8G+9dCcz(8?%K3~Z708wbvG2xcDL|+JDY8U9Rm0+mr-@Sj2#Azj-I$5ea zFpx9%vBnI9;Y*5Y0*X zuDqnF=poGX^es*SnvP&95o&*Ym(epeoY<@D#mp{DI4+ zP^;P94LpI0INkES1&RMcQa!5p6* zT-B*NSu2ZQz%>;o{V>TihL_LGsK7cv8Ej+lS!}1r@9K=GmEPF*5+nfpr0rU>m%^h| z;P^)uo^2Yiw`9uOb}%k|uCTm1Kjs3Rrn3C*`y5oI8Muz&w(sWf&Kh}NYF?htj72w) zd#t`QT;KW}lUOq>UnJa~PvQVM&wuA<^?Lhrj4-I8{kF%|)X?nU^W`v~ z_YlN&S=IYuQFfSjlVz?*8vf31AoWoxc~xWYp3@Y3zhMRR!qguQ8R2u=dj>h+6H(%Y z(UMfk2ecy;8m5vWv<h6*N`AzHke|;1-Fgn2IL@s?*(vr`o(jCd( zX7WB!|2_Fm<9FrwtMgs{0sFS?6;WxADIfnIN8IZk6XsrRUQf4owIl1l?!75r8_rKV zk!2uf??CG9IDl0j-04j6a&ymkRUo!LE2rC-?v_?8_~$S8lKa$>nHZ_Zq0FDR9>KIj z`77w^Di6aDhwT;V0n5D6sO^6@Ydb8lwW8IMkwj2})9hixGvsf`I6}0-H`gUT^#pdT zc7}Ct#G|pEx`*KV9_vE|#-?rCulf2fy^iy=C4zW8ZZ_SmM?bB1)fpP&+xf%@IxPE= z+$=oi-Cm#Ae>KPHwHNx16${pnl%-&6u~Vliy=)Sf&C>A%~NB z?7cY9Oz*vVv!wq3TP2Ef!O-|tY2h!zCM;OBHvvTS8_KJ2X zHVXF?tbl#&pMC>*dO{@c*7s_A^JF7@Z8+@a9Y|~t%^fgC)=w+zjl!5Kn%N3dj80_} z?wtLXeRrSonjEd2{4m9lp6X=L5oGd?*_O3>ezwBBi0AD10Q+n^=Kl7@lBI*!4pcht z1A60Z&Vo-xjbXmJpa;YtD0nO}BDvh?>5o)b3ZJ!{?zV!vQUe%3=S4(D!rBmb%p(Gb zmK9&bx-$O98H&~bwqG8=yD0pBsFpW&-ns2HaQwrUy`{uW^B0?1m8grL zk!m5trF792WU&04?tY~!3bgAw-!rw_c3Kq;>+cMS+$)V$R}2q0@FEU1D93#1={Pkt z!@Kl&iTSO{x7qJX7$lS~6(uoTI_~{NmVG-@C9RP;{abczxPnRD zuC)lRTKGo_T;QCm1pJ|?YHmLVq91kcHl4hrkUYwi-w-$n+m}5FQgs;j}ET*fn^$ 
zAavblno~B?&vdo6xb(iD^xUnfo+wSoR4S#!$mb@?y!6=sSK3jWdq$hV02gU|!7 zi>ik_k4mkW8SSFfdOGd!ftcFR96-_5r=my~ej`*@X-YlDiGdv~7LHFebX zOtcuT$$1^zIU;HXHik18z+p`f_Pv+4W2wO5^|@}uOEZG-<-<(E6DQh=fmqU-zXzH! zz>QywcwGl8$AYfSb9O<(nWwac!%wxn*%&Az)fgxm#2 z{JN38ofWuLCSrpQD9#62PfBx0RJoTBM{^TIci4LO=1VzO%xEnm{!b!pV3IMQKM!m` zP&wR7Pmy$=-F-H@>H6gf|K{cwx^(RV+{Arw{zFS$t0cV#vO z7690xXJ>)-oz3H9qaWGdsp=9pSHL-i~& zm3QaGO29}h$I|T+RbikpZ%V`2*7DNa2+-Qg?7DGGiB0s7&l|^zcw0uk64Bn`*7kk) z3z1=}@LrQ;R(#6DUHlwpK|kh*FsrxSlH~3+0Qnj%p(UA%5$@SW3)1BNqfU_T7y5g) zf6rItVg_nurBMb<6;I{ddopyMD|{uu&%c0Ta^RldEUZ{51XP4m zvVU=Sd#BsPVKzEY?lUx{otk?JUMO(x*)Hh&g!~X4t#~k%YoQ@}!xKX*nk%p@6q}SM z@(GvivnRo02kxmOC89z1hhyle*;!beoekYyc=gwAaX4(xR<+K;+*9t;w=^xdraWm9 zCNOqB#)b9|ChFTmy6krp>Oy8I@NZq?*Ac8o0%JHk4(&Hm8qnRkhpCuK10)4mJFX!= z&pZTti015O0y(ayi2Y7+gs)XN-*w=wxFB8!LY?Xn2ax#!!qnjue(tRcpdNL+nMD#- z|0mu}SJ&x*ipF^o!@BGyf9xldoL;5wSj~1XIR{)~wr{yUJt_6Qos(``bbm7Gx4Hkw zd9?i@LfFUh9)spo=rLGFtXaPqk_!<>C6y$Q#I^079_b{)CHN{)C5eiTz8!mTV6lC0 z!ap)6=y5o>n39o_F_*FAx4h!e=yuSwdF!y{cYz(_<9>D+AOmLO-bOjvFzPQn!1NH; zRH<3=OKX6sJRbrvgIB{W7of4=lGhzb@-WUpPlL>}m8L7cK zCWFFQf%En6@5w?A>h%SYxok8%jQ6Yec5iVk20iUn2RoN;uWyKpOz?U5-S#L$CTAU5 zwu!*`!bL4_aXa3i-MQgyIZ=FkdJ0(|$x?2xnf=uEvd~z0-mOuU{geWa^4{M*GUX_0 zAes9(xSoW0%7LRmi zKf(9`2h}&xJ(QDFkJ^3mzG!P?=5B3SiE7IF)_zNGyEbW@W9^vOwqLf*@AAJ95S(sy zgQze2-HFGtj?O%pPKK(3X-TSc$lw!DHr7%Y7EW2(xok~yfg*HM7s1L z3?$@KEZI*7{c}a@!A=>9C}Fnufh5a&L1KizER+D1(w9U)hTGu-e%-#^$mJuWr-L^R z!l8)kk0~{a+US`$5D|+fpO_ee&4N8iyL|4i^#Y&*w7=ltDKKFL5BH8C2^`!DyIs57 zaHU9jl^*{GdhGvxL~`+uG$N_}zM?-ijwBi)L*;<8J)#dcySAArOVBKt?t2L0x7i$C zy0>Xl4cFK5%53zhc;mCJMz=q7)&dEisJA18quo`cyLKJ&~mwHulWgb1{ z+c3(;UuJa=VjylYQW;}6xbKuLb^eyTXBtPgWloy_s%$Fiw&bMa55@a{n)G5hxD^E z87D%}%r3gFWD^Q@R^BzNOi>TUQX!*eQg_UE1U(RRXVofA?&Yh=(W%O@m`(>O??>)GIiU3j4gUv;>Z8t$FEK(#N5jYSfne;8 zWz#T?$Jq%~iK9>qrV&X7aUN>t_>ivn2*zR58H^7TlBoW*yHNsFBjRvi-ipr}VCdy>Dsh5VJ{S!?i_Z z8l<_FG*O@!zcG~hqmEQ4JkDloX2l@E5mD!2`#F-B-TUQ8ELFd0hVB}zjA5f=hpm$4 z#Bc^*|9X4x2la2>47hz%Q~7q%u)4;1YnRy>!#3Bsv%5nw+Tdu 
z6ra$ZqLT17rWYgLPz3jKsUs17Ee2vPxlK~Y)eaNO0`Be09f-raF>Hv#uZQJlx`{Me zJ|Zy7e}i|sRP0l136l!5$`{TLm$x@#Mi8fyC#M@u>vahPwb^f{m^j?}CH%_rm9vT} z($uHqUHXsiOJr75nM$Q;et0D*B*YLDMiPP5sQ-yv>HzTol0K_|^OTQe;TQ7aD#$yZe zhW=zLPZ>h??708k(!k#$&h#R2EEhY9={(TyeAIBlVw(8`BZfQ=jb_*FT-k)4tFVro z6If_en%Q*e+zF6M!xL)Crqd`BE6$|iQhdg3c_5Gx05xIu(4lrtDeLtHfv} zIbEvjHttHR6een*(_r8FG3%(|y093nnN%X_JA)EhF^E1tyE#9BRi|>a>gc`T7{pJQ zA+V=Skl^$K)HUC`Jl%GE#u)8{R7$ejOG}J462%3tK|YBkyZGb+)6=u*Y4CGb4u@ML zKVyhTI2`$3wD4YpS(P@9Y~#{?Rqs_3n%Qid#gHD{;*FtElQXDX$U{qWM8NN=Hl5c+ z%E##w9s`px_2FZGxO={y%DZuTP zRb=9$<;~StCZzK^Z@tHEa)vpxFui{$jM#IrUMVm8gd1Ab#WZ*4@!9`nLv>X;nfA*K(z0_%t42JFUTydk zeJqLRi0BVT^Cn^+{?4Tsp)CbOe1Y<-d&`o1sm@^wL zBe1O)>EshDryZ8+hf*6x2u|WzR+j0>P8$}-yTxh6(YE~H@c2hremSUZ096Am0Xnp* z;_giEKa6?Pu{*Hjf8$mw@@^=VD^espaFjGFB)+KOyXX&}tD;_5eE*16j)apSEnrc5 z8h?_hK->G|76UN5F_8fNTCGSARtH(rk$aP7lW*hUXAajs$|fVbAj1|<*=4^&X@WX>-<%c|g`n z)5pUW=cU=tq?lpoXgDpxcVxR%v_5-@UkC#~l9+mLq%gpKrr_~11^3*KO0}+-0A$zS zj!D#H%Z({`c$kfjk#=vwzRdl*T9jSc(9^9?cnKbmfeGH48v%#@4#z>kFjn~cmoaPa8F zW6&yQ)W`u&DIxEGMDleQ$mdBG-?0w5I$sBJF5PrD?K(by8tM&{k!VH!_KA~uN3FZ3 zXwuRdFa=|<$Jhv z&)?|_j_7kSRccb1Sp?@^`+&` z%KSus)~!?4&D8+Td&^#EvwPEALBVVUA#=L>sSfemeD?OU_n)4HaRX#<)N(LJbQkE4 z$62BT2NMu&-yyu(rIi&yq28y ze9(LUAM>P*l|hLQkx3HNe}zM~JyCF~V`&`s!c+>8lR2_pIlPPpJqiop`>IM?sM?x( zvgNIc`~Gnj?^Ax4joohxL-Pmgarf>yTnq+iyuXE%xgT)C`jf_tC?@VwXfycS9al=h zYbhJ{O5eHc#tW1yP@9FN?x?woRm2>o)ie>}Q|-#34^Nm!2(Fy?lpSu;&I9+pyBeWm z-*elUxS8){-rcPbnnA*A^yFNRdB4SHe&QK}^l16w#qJGBnDy?<^PVcTZj2N7SEnnZ zFvo9ZM?Bs;&|b6QQfnhrj=cv?He0cQOdeB_(KAzb<_{%Zr)ID1j9}7$2>TQ{Y z;ZW3Teg4wf*nS%uioK(|JBan;Q%u~7Z?;Qm%K5EQt08j14KfMmg|tvz!?G`s#(C)@ zT6NFiexUp47aX&mvm&iF4tRavhl}pbF4&UCva>HYP+M1S1=D;W$$bSh@SCVh(Q+@9 zb*rB^QDYH(XabN!=}FvvTrCr?-=(wplys#f-igHHi0@P?po6}+&v<^T@5nTvQ*nE( zwf$}&m3w{1ukD#1IX4Fc)RGCf%0yB&xooN#@rICf?Jv43)8>z? 
z;yL?OnLIkQ3)@_M_XHk@`~X&3T%0QVfesauKn1QZ8tC~Xe2pszBYbRZjBnfT-jwp~ z^(E4FXP#YQ@;BYi%cp9cIz*rftP#9o0lv9od$4 zD*h%VO))xykIL-~>o02Q!H|z8n^VMZ&u?Tn+M`gN;b3BtlLNEm75MKDN&zAf#pm(9 zs2*JLR@2(&rSXY2h(+#Y?^ivCc&@!2^`qa2ZgR;76-$oGzV`n9mOiSxIrUx*%w;knZZRu;e8+m-Q6cX$*5bn$~@ z%3m-tKKV<>1lVl7#VVy?Eg0`~gznf1N!T}u#X46!&sB{#K##F-1G&nSScA|A`mBx_ z9829)jZ)3L@GIuZDYI|DCGt>%ApEN%<9BX&kDRG>aDeShkdY&OzGs4Vn6l=u{;iP z)RNne5lDt;`s=7Jn@_o9BsgK#%fN{U>HM0%baDrgAj5x+StbJ|Ueaw~(#zU&`q!Ho zGroLq(av26uY+n)ihIKB+$QwbKy>m1U<}v*Dr@Xq1@~8uvk;*-AQz+9ICX7k6(?G6 z?k~B-ifo~)&|(UzU&#Dr%a8FIz4wK;&xh{MRmp=q74qF(&d8)Mp5qzoadU*1}-{*;~x;MXlZdP0XWI zpYZXvQxrVazz=rRdS+~dX+Cvo2%@mCM=Pc{koapXZD=aAr;=d{|MPe(6e(67$~zW5 zg3wI-0(si)_8MmeyqjwC{&2y20gY1cOjmU(<#^fpj4>O+c2)Mkf9f(N2=Dc`TMh3u zulZl{lNDb*N*Um=y&lfx^+bz~5HKq=K^Jd(?&(FBw4L+EPBOpyK|I;rQ!p~auzW{) zin#ZWssP2r?nvOD zkI7#{KK|G<-IpZ{kgxy6f61VuBsU7v8AxU#|4HD8R8v;^!?$TE3LfoerIfR9`^KPVO-4RpO&ak(F8`b9W@+DeBxf)PBp;$>auw5(;u#tOHhoXl8B=&|%1e#gRoxF3Swk=gW2?;v z(0af$((YNN;SS${*wuW#R=3)6#E2J&_Yzg8iu2^biSBm#bZ=F|e)K^1|Et=oI6%mF zt71a>7n3MM83?t}tiGgu=@D284|j7uzSeA))+m!vnD|<1L9(*?oG4t)#+^L+aU7P( z^ho>1Y4Hj}Na!`tR_X+vTf|ntVRK>dGm3zH=tV#UJys5?hgewj>(zvQ9GMzICrUXd zitYwuMT3Mv@D(3*y_XIqTKq~L`|~u)Lgy`e3lw&C#!Yf& zKrG~nMD~pUQq}$EYJ(r?z6Wg5n9Hzze_R^OLK8bxXr%artMu?&2)Egmqi&6R>wu8X zY=c9)!0Dbm8^XaJoeoALevXNx-N!`}Pz;5wI;(Y0{s#t=6hXXp@E*v?si; z+p4E|c@)$@YdM@Omo6Ri6g>91w~w2qeP(FI3rjSd{PajU$YDrEy=?wWd+(EplGfb(0wHde~T`_}kBTL=Y3DZPzA zgB{sBF(IdkQ#NY^88hZC1**VWJn}f((bYG{8&l19 zw?4JRAjE9t#};b$SfL8Ys6O&y5SCIG|H6Py&mG#q^~) zw9o=vsnCAoA9bg{qU7aQ1(3$ppoTgvr*qwkB*v{8VZrnjs zFrCrbTOesJTLpEWQ+a(3(nU0aIv{ql1;}&`%L3kW*Dj`;51ntXQ7^4lRs5oHr@R+^ zz;lC)|6NcC7GNhm^0DT5{^wQ-Oq5tvh|M%B6!{>Rm<2B)i&Y_#3&fhaCyRJPqFH?{ z^Fm$ZlU03kRfxBNdcIO*!$1;inl~Yo&&~FbIyPmP!*V!R9G^iUy%e=Lp+HGCdhZ4L z8CCpLg|_saf=_{U;Pqvl%>C@$6F`Q9gs@2k@3Y1QNlMNr$8>EYe=R=OX3dIJUYBxX z>bdu*k&495D}HcKG&76utcDJWfRYxCI3O;bT5uKo*B0Q80bWP)47RrVYvt510?$;f zVLO_4_^`B7Db#*;^75b4cpC_8pgP64Qx>My!#l$ZdZj>+Aj(*VVYV1*nWm*uR+R*Y 
zO`hF-xT5QsD6iwur_6HoDUP*!7JDZ!W+Tb!`{xeNhXnF};;7}*vi#$xf*^adPufQnNQGD-ac+;gVR(?ST+SmxR^?oAXSUbw^IOGyrGPBi;+^E`lvfR)4PKHp-SP(YAK@_g~;j z<`K$?S!6sbIuAZgqSxs&%mNV$iVILl9F2Q&l|-I0Zved2aqYdpB;p08R|-d7t__$p z&!qcAO+&NsB_WgWwN>HATfdJ}!wZ8iG@;{yRc)>NKxSO2WB`*2fb_t*cMEYewEELL zaYH+OZ|KGEZ>C#6RBVd)uUUxy_=jXQt@sFX!k=VpE*R5>zooo>KP5%SpdbKJ!i{@lA(8@Lmy99teBloG;3md?GAyv zHI8dv6{ZA*J)9$M9d=l$8Yqzo#qB#MHHi+pwo&SJI^@YJPO`?z^zDuyeDRC=6RBp1 zb-vw8+G9sm{tE!m;aeHg4I?v9ZCVF&_-vUsqH=un*v-B>t5hzbJ&iN5V9}Ick4&VZ z^CVlSKdikHfhjPo?Eus?uvdLcmICg&4M=T5>^2(40_KwpEP=giQf4(>Tg7!~lb>1K zVr@H^d~7jo8u8-xc$0^<)3Egke(~;DIP;(S(+NPP$V$x4wO_|jxjKoBcQF}_^fJohdA}~U%8QA*j;^`BOe0bCHaixPws{4OU)CmdH!W2SOTvIouR>A55%1PrJjjL zbLjAhu`E4kJ{Z+xRN-hPqvx_|N%IHm<{gPOW(?HUGwbd9N z&?~h3j{jh5r$pZVzndd4oC;tqP1sN$i2WH+#Ezp2%;r`q=m8O@(J?tWF&Z+OIUL*Q zX&)QdX1qEVfZV;guLF_T2#N3~o!;?xMPix0<0{I&Pe6}Y>ovaKxt!Lw$4HS$ z67uu*+U~wR8HP}7o8DaySapmnt&W+WeDErMZ1{}j=NX~CYt~Qr(Fpyi$ z)B@mBLSRf{re|4LH!{sD3-S#JpK*k-DBNqXks-t`UBZep4;o0#F9;<7`02f81(GX0W znlM#lC@-WbHV-VK$NZq%pY|sXWxP-#EBjL2LL!s=f#jcJ6+w!VTs;dZ8NbaicUDzQ z_$+CT>(!usABqQ(0xE8$alb@GzczPT?^B(tasj8SFMD!Z+d7F5&RCJ3eAdDecj(24(Lt-r|Bx9nUjR>{u$>z)6Mu3sqHj3O z01g=dOSBpKOOh86=XEo!e>-7t(0(?IGsQv`+rehum+%dhBi+&-1`9D3!`9>&0JEo! 
z+LOV7bKJC5=|A;sT?v4v&>IZr>tEgh8Z9!Ddbyso5t6XfIP`AjpxGkC(0;C&RkhfV z8iynrRXo`|O^@zO;Ofo%C zWZ1FYVy*5fjF3%yh3CH5{TT{*W{;OqC4Z2Vn^qhDLMfBd03o{r*$^=O^%c-4i$1Ea zk{m`Stpr+UPMM$8)HV#}SjWVy%T}g|Wu5gR9mL~Wkis=gybY4%E);NqgA5qIhr0?ltN)P&Y8J$Y#D!?>XlnLD{r}c zTaJlXietd9u^j!3#*a?S4}Zywb~Qod%GG*3LtkFDn|cO1`hs7BP&{jh#+gQpJtn=0 z1wNy8XHz6bGgG$miRsx!-23AGHMbe_hgO5Qy${I*Xr*I%Jo7SIud~u+!SV+VhD@LQ zXOsULHWFQ|6evL{^p2Tm%eq?dr{T*bM?!qYDaM%`s&?75_ZtUXNt_nz6L}4dohwM+ zHm9ZeOnIwXl^BVrCifF`0NXD-%mR;C?t|wBa7wQAp@#uuIf)6{pR285!~Lp62UsK= zg*)ImTJ7?5pgcjOCwY|PeNim0p7O#Kz}0CO)l4{V$2%)W*46cL%6xxImunkXYC-rX zPy-0{*N>8Uyh6DSUrqo@Trg!cBUvlse%pzf!etX!YEYvTO~y-aIdvzqM#F9yy{A?t zCuu!dSx&%~DA0x9V)Q5sd-O#Z7DBJwppAN}SS!B1ERC=BTX8U+PL*?+LKd&G$LZ@4 z`*>!{=wSC0TX?S^_x0)F`t>EIT9FCw_T$w^y17hGjK@C)=kEGP+HYXa^uUDxkn)3e zI)Ld0(EG|Q@=4D^5LejZPS{hN#L|LA_uoPWUdB zcBb6WrKyLQ-6%dNEV@sab7NBs}bANL$~NBl4q{eUyAnRHblaQcJH3es=wqWx6s09S zhC+%6#j6Dmz3$kO+D#4BjJ?*Jnv6oe(P=L<9KOT4rQNtx!0Hef=Pfg+8-8N4q&KyG zv0zWo!q0BlY+I(x%6YysmDpY|jC=hi&vqNHNRG0LYr}`nd}-TF&J6!rEoVyc14Fk9 z^{mHKseyV2f?$A@Z_95gymTBll)tKbnG5Qdp>&=Jg5gH z`Dv(RBvHx@qtKPZzzm(r>X1Bjve>S8slUA5|Cz2^-0)}0)kPB)`n-L`nnjfR^Dj@m z9�HYuTj^BnDV)yC>ui#v+Y`eIa`hg}B*n?Ddkh)Vp;L7}?L8GTC zlQs*x+wOF1oeg*Y3CPB5Vq-u4CwY0)fk{D1|3|3FjX_^~q2*%kiwr8PAm!gr>sy^amQ}NNR3DPx(EH z5R?Ur<9R`3zzvk?@|9dBLi_kFRL&g*gg8k(J(!@b+kOOW1L- zc0dSAC=#IrSi;qeZn*Bbm+b5dEc=NJ0 zr|Djp<}0^Wt-h__7V|anfkR+Hdp839AVdJP8nol*q@z@bd&shrO317xv!dP|LFDdG z4ip9thM8??1FX=ZkekLRiiRxDc{Y=&FfUq;k1g)Mm-#vP!; z#v*-getbwc*_>@Zf1zJzMIC~%0p{9$)t_2NYBqD==PmH;i&~SK7l@seo_j3rbMGWV zdg+k#74(f}DeBE+p_Z<_%3+?!kpGfAm?b-lq(JDE?IM$Yy}kBGxBX2ERfS;{bx5Au z0DU}-^y-@3fiK<(*7qiW)QlsiSl(9MY3KJ0nAXa!1+pQJJrCadDf2t6@%`!V|FiRD zqEM*iXoEb>N_z!|O~7;Z`TL;Qi;**4Jp=J%3pm8#9JfU&gYR>3I(Gbo<#6iKnd`{% z4PUKG=gOT-MW%KA=|FhOr@Wa&VPW>$goI2Yf4q4(X_+X)TSL7>;| zSV`mk{6eX>5v<8KR1s|2gfDwbc74J9gfQAw(JeqnRU-4IN;Ko)4<2$uc8j)6ch2Tc z&hY(^PKZ()UVi{puDCW!qI*@dS`?{tAOW1q4u8x%l1znqcZnxP!xY;GvQ5kBKk z+#hprNWJL8({pHgJ3V3n#p_n^n4a{H#%+@-_DEe)xUJa&yfs&`TG4yoy9tW-9_25{ 
zY!0rw)#{v4y-JOhfx1BnSWj=G=1L2!YBXAz9S1El_rAj+Nc;;ccF$fSy_vIr%pl<( z9_-%58EWYfarJpsCb>{6&RqvUoTG_XW%saVkCaX z+#+wOS9gR!!Isf64A)a+S~;Go(9EUw_?4prk&_?np{*4K2jp*l;9fZ%S+Y?C$(R}D zI*Q8wAx8BM=Ru6)<+GuDA(7)it2>!sc(Sf`>)Y;kR!?~xbzq8!VRNz+PV;dwoE}pw ziga8s(hJ|oCQLI_%yk!84Q!{)o4~HxhVX_qVpc>-jbRpk__G0l!df~2#c zn5gu^&kkEd?7JBUZ->dT3M!UM&LtP~3QI}%-{%c}4B$&xYFl`}Y(M8;k>$Y760(U>E8iw~aD<^YlN zVVSALFquoh>f%1U@J*ogS|@7h#q3>>sMDdu9`XoJE$0X^iZ$FLXG(N$Q1-m{0YoGe zfj>hz;FhwKR*bk&mZ$8xoIN!61p8fdKa-~vr|?|f3x&^oOnG6HdvT7Qa;Ng564MMf z&=2(KSG)rH}EL%>9`SmI_Tx} z_iSt#C<=8?0@gBa&s(hu6AS#8A>ka?k%y&6a%eB%ZtYzd*1_*CZj{^gJ4eGS{A0C^ zeZ?s7DW0$kAqq1OZ~9KX_2kMh^PA~23 zIjB!b$edVJf0-&;NZ1nf@qzB^!j*3(TMJlbCVk0FR?HomW;XvqZ{)cZ5bPoCjc zISzBZ)tJuK$!~376uUZlggJuu(Jc%k09D@fs6{l(UB6813+^j(tTmamhP}~9+{nGo zFzX=3tD?Nl6mQpd#mz5Wd>;|&beU9Vco%7S{h9tP4Vm0`3K_4YH`sNGiI1!9)=hC3 z?@=EFQG_u1BTU~f3(0v+Q)0a;dAn)G)64Gwk=1+UVC6Mhs`Kcb<3cG^hID`ug9w>1YcWf_x`my|O6Yf%AQ$Fh4XsZ-x zt4DqBMG1B*S|*)gAMr0$ZwN=wdv2C+KIc`i-u*x9y=72b+wwjfBsgIpXmA1{L4pMc z?t$PoxN9K6f-^{Pf(;G{79hdh-CaU(celX;0}S%txkv6f=l<^3x84u8ilTOn)n04$ zUaR}*r=K>z7mT?}^&xQyLEY6fHdkZyoOfw@aus`cTS>aJF?d-YgZA*)8*kfH^2_0l z==Z=sLapBbQ5tSg2&oe1`bW;76!r}E0NzJXst^O{*?|?N**5iMHC1CvGDD@PQ!DzL zm-~uD2AWdtHKK9uvY3o6zBoO5KbiwKQc>`jeQer0eOQchCFY3Vt8MX2N^@U{MwV_N z;1zl zZ?RTdNl#Q~aO;M{$)HK`wi)WW@wq0#u(!UB4dB#l|UM`W6=OA&C`G!3e;52Z*p45 zWRYP}?*eq%X;xDluvaF@P3|x^uf*>0xCCSEa3hmVirw3}X%%9!)OzNOR~3E2M6LHF z$Tjmp8CYPI>HvP^l>n4a9L>+WBNS#&@dq$^QaGK7Ehvjj5ea~*7PDNd!@I8yab3ndR zR6Ng6a}%ZQJ6VGY(v&n^#}T<~Etcf9qe0m)p59Jpe&cW-J8D}WD!5ObOgUFoSua8N zo>8I}U$|AmScV4?4 zjzV2qS~TZc7mze7#YKpBWO1Ow>xiEtFF=37L#yG5` zE-zpX0UnR1mV*zQS0@J8U5jg137aMRjfxGs!&N4CW|j;s7zCTsRO+_*UEdvO9%EFb zf4Xh3&-(=Sp{X5^LggVpYW8!p7__3W&?Qe!%+}s|QB;WPpw8e@qskDbTM+eSsZW-W z&7vrX*YYWgg!&)?(~DJW|I=Iiy<>F3kc)G@crEIX`;K@?^C@o9KQ+XgV#P`i-R<-i z6%o$P*290GuYd3@Z}CGAsz)VW(>k;zk!9Frb$x^7UZH0FFk0vH0}x1lda(LHphzKi z-lBcIFusfnpiEV096*aAEsHS5aN2K`Def67<*rt{5AZNwqX#z@N^-6}TeiViHqvT6 
z#Uxs5)YZE!zCqyeW8Di>f2p2Z`2O2_einioTsOOr@j?S_9j4*j(QmT3r=h2z7(d;5 zhy1UVkAlRSnNvg>C+xjwL)t*a*c`aVP*9n&IRO#E=t&_q^%rw+(ttCft+C zjM2w9V6mZ|JB_&;G>-1Mo1hy`i{39yQkh^Ft)0d2oS9uLuabP)%R+&yeEFa`ETaCy zGmyKf<77tLjH81o=2_E@^F1V6j7=)(-g0+E6`RpC`Xzu}{^%9^6doolu`9I5C$~RJ z4s0AyEn8ZoSC`@purLRfoCRDD%06opDo=iPO>Yw*gtMbU-0a0FU45vHXVR9wH{l^T z^DzWI-RLhD#e7{zhRs&h!jsf|Zi|^o5W=DmH#`x<>OFl0GULs~;^Y>uxjFZ}fjO>l zka=t}R|^&jcNf{d*JpPGH4ex_@y{&XZ;Te1m%btRec>cTiaVWsV+O-GtA6-FjXa?X z_OnlD8c*98kGZGj;4pz@%?6`Kw0Pg=-NV2SDmOHUnvJeF5YrOs(aGIQj`Jj5qPw9v zB;eT>Rti&?X>?}T#-f}4c&l;vxlx_X-c7Ulp_WBogs=A|i;V-rX-SCNZ=;y|-JBBwTP$tAR&NOB&vbAJG<(53Z7Tg=YnoZ~qnJO&}BGL;YxZ@v>t_yR>DtvF2p&Z2;e zBMJTf+~_y#1?O`3p3P#&2gSJvpgUr<@=`MF2EAc_SSV6ZJuHrdiHQxSVv<`|g3M=8 zI#rT^btXwPYE$ONc4Fbhcf#oPCV+A>53K+BMSUAXohQv2=b>bXt_qJGQ;u-dAT^v3 z=g{oo`Sv(ZvvadsXHCi8`Hv7Y&=*iSzhYoP2MsEnCXKg|zc?nKAUU>DUEBlr>6_w% zK!TKJM<0A>Ve5E#ER8Z>;-d7<5MfXLdrs^3WZMTjA7|q<3SSS+u9!)hZ4?QQu*7*g)U1zVmB+HlIL`9AVEQf32(eF|UpE9mff~NMbvA z1b}K{$4spSE5DwH`+jz3%*>Va48m&Ze$E8#i?hJcmXexkc_Lp0APf`^HY!boEeT8% z6n-rYA(YRF&$qf0Nj~x2I`^ZH(~${6|3Ym1*Or6H2;!jA?jztJTBji>QZ+$q-)eN< zpBB;ju}C!;Bze{yC5M?NpG4>0Btrb444In4U*vL>Ul5g^KKB&{0l|eS*?HHLhz@P2 zhAB1IIrJ%gYx6;ZqOeYn4QKrR>STcB11cG~Q-c&tm1xf;kG(`8xq4vs%;96lF~`+p z1N8vHu4P|UnYMYPbq_?e146FRa4%@n=zem{rpfb)rlVemJKs7QXBAlzZ>=xLWRf=3RT!G44CUgu3i}BeBlpkfXf8wL#3%;6UJlgyDnm;Ylqj(klYDnb za0%;KYI2TVK67iy5NGx7Jq`N0PtIdIawzpUGb{h;$!(oxM#SOrW-5l5u&}>te%XH6 z;nRu~)oeGsWmi@Ae&5qqXXu~J2d1>BN^?&zk+8t89(L^0<{Vv|nIDOAr+I55_wH>M zDE*P0{0$2A9ap3=k>td<9}lL=1F#xvJ0|*V>66n796p!9;K#!E@V!lNs3|~wM-tzk z+hrzOQ5Wwd6wMVCs}$|$^-#a3zCS_g=6D5Fht!dm;`dLny+IQy zLV&QxT4nIV$Uw6L-%zutEt8$E(<+&Y@Jcy!1Hfr9X`y;HV;KG5Fl}% zi%TW1{xGmZTqi-_{*HnHwM_t;+1%sbEWpw=r2QYlY!k z1_|Li2yMy4BR@{90wnFO0G}bMbzyqHE^M%{hcN9GkV_~NjuLv*S^SdP%q^>_-t>mt znh(oNvsjM^0%#Z0-_8qwh!qwe(Z?)R@ILv}ICc@8;*r*07BDZ6!5I?!RCL)^E7wI0E_N@+(A3sz zu@jAP0L;r(5KWM)VPT|;|7SvxK#{wT<^EykaoJ^U%DH~2{U_otWkuqbqaTTFw_haK z7y55aixVmG*>vkcVYIuvHhM~T#`no-(^gyXKN|O%24NMjyR|$wEbHDgKA6HYs8}4F 
zgUsqeivL#N<2x=-lPOsqO4OeV9eZiK*8~m9L!D(7d>^&qM+qRRi=LLHa&OcNh$Hli z&9j1>i#C>SNe`dWt)OgYwLPxv8QDWYvpQ~{edf|lZxpokuw4{|d%c@;^JiHgGzs4@ zl2ZZsbG0hbi85yPv(- zx4Gg8A^D!!g54nEbz8_etVSdLnST3cxGS3bU^s6Q{%sUnf753@+zkb^imfQDaYXkL z2krRSx`((Jl3UixnVO}$C#KyB33wA;yl;9*6cyU*pF^sXjtnR|4Ul-o(G|iWV^sW& z&U2Bdsp9y$QfhYh1+#~fwp0Uo#L%7Q$QGmA&~L5~FE9584xKYFYo0vMbnJ`in&<6l zn9nt)EWpKjSg2pqr^CbLCOy;fT9@0^tK_pwpZnF-od0^VO3SX~O@U1v)({_%*j`+P z!hO=_`G&Gfl@1zPRE?|7=}0%12NFsW81he^;TX4Ftc5jn!P8vqhRy%(rXxj%VAi}K z$?6fRc?Qj2;Ge?k%qnR?NO|>!C-O5jK3I7-!lX2Swn1dxzBlOYMP0ACf2mhW)prCD zv9Iww9#WbO!?-Nb$)%9M*lSeWu%D@PZvX^b2Y0cl$ra+zeH*|86b5Uyt09%c}6J&zh{Xhca%dtAfrrUJRtFwM460O0Wvz~NMh9aF#3}?#Os&S%I z>#6)~k#dJY6m-0SR;b9->+Y~TcIkkgVx0;D&E^+9B}Gi<_f4y;HKH%E6LcCTOWogp zir2EVL!o7*1z*XxD5|A8-F))?m5OxEK*qq8ODKtXb+#wF2ms^;)lIv?It8$$m&0|D z2WxoGT=+G_Cs(|0&7I`&Ona16E|U~r2ZfV;6Km3;#l;5<;bBGyNDg1MNy?q=0r_ze zmJ-XC0s^$pt`9CTj^Bw&cAtj$(a1y^X1a0keR=O+WY9qSCB@g*;4wzy=7pQfBEmkY zRiIk@lJh;F6mT=oqFp_^TkWl{WZXHsZT_Uh15m^$0Laa|=nJcAT?IA;*&30}`m#T% z1iZ`7np=4SSaV*y&1lzTl>c0;gv}BKDGv2`UaXH$n#|p6wC}?jh%K)67@j^g3QCr0 zBDZEzny2K1EYjUjxgQMqj-F+os}^b*d+A5%j=+%M+Z}NC>l!@M@IxU=*s0OozjN!r zNghRkDVZXJcDx&*iFiBAs8&W`%*X{sHfQuNo!DxFG2(;c`usv_C~w^M_5i^y zk{n>0oRlARbu$;!`oR5Gb(q6R)vod;R@jTbuIMczv&dv71M__m*AD4&{JZNsTdVWK z;`iGWBHpUlnz;t>EyLKsCyCD*Q>YH+#?1BEu|a9C?)b!fB*jzMsgnViE54j*>rrPA z`eB1yGsR^r%=>hb(l0%B!5J}x#BfjpSE+HqB|$0-Vr`tP>`2+@!Pmpdl`2;W>1{A>!cji{K-ABdK93S8H0T zVUET7M=zZ?*Vj85SV{dEHx6o?CWqNOU#fA8HLKHdFONpkQV0T z%Vu8G=>j50>j}?qi6kACcxeY*mnOiGxfJnx>c#|(rV8AKaN$Gn;Vn}r@e!Z#?HpJS_ug987NP1EWm5F*x%0%H$RBqU2ZdGV==0hpnx3DNXNITw-;i) z!D0*c4kc<7mVcAmgo5!==KAKUU^t*Nj0tjH$hmyBrA~KS%dSc>9P&A-L^r2i@N8N_ zQ;Rzzdk|4meHRr%<8bNj>#L zs&`B1TQkMZT<$VA$F%drL_|bCXvM*v_u>86H`F6{xMdkvUR zGOkyzZHZhyLIaG%jiT(fa<6C!@u~5T@;T~?uK8@#lLX$c*9td7+;T(^S-668=lLmm zG=`OLR-(KEXf0=HiCi#;IkglSaOn2OUki@m5?tUlvS;E^B*^b(J){j(oLV2x<9dJ5 ztHS5JTS=(AN{Y&vgV%!kfPv06m}GsWLt0(2@-&9e@kw7IFVysnx+*Re^sdccTK!YV z-T3tT-~-{dONQ>IkNvT|BEi-^GEV6rfArz+sgRA)Z*o`7f#{#cZc?L6FJ|F>OoRld 
zEa_2zbjZ`Tx{wE!=T%By=In2v6nxoDlSjhs0vtZgjgiQDU%VI-&o2(K7nLp!ktJ*Q zz* z#3IQF7yBDU>rc`qa6#kz@E0|N0QhIjiPv|J-M)wbMd#y;*6=DXkj1u)5&aJ)xH6QVS{^}DxJshL8?4nGH zbo#fGEUJZXGUzGC%1jx}Do$GcC`G(EU-!gB`Ii70=2@@4a&Vq3J$9E@vpNS?wP)8X z{lh*N*sRS0^k|XbwFf}cYJMX}gObH98*VdOo7NVenZ4poO~L0ee{#+OKJn;F;*bbk z?TP(2_3*dsygRC@eCNn=co3gl%9&u$GO82z-s!&8DzzUf3ovXCA9&&sY7p1QiO87ii#q4|BYwL+zaTYOxB>17p zUaIHOj};ooitRt9gg(j*RW+PZdzE%AI_7gLB|yfBJkXI*S0`*z^Q$H0jz8jZT26ki z5J+s`s=rC&Ayd}?$a%anfL#6}I(=OqjMgue{|DCgi@ie$)IEHQ+)G*gPOwr?xr`|( zep|9EO?@k*W=&Oo6uXB1kBvlz=8_;p=3gwga_OS{d=sqrS5) ztcD*3RUS<&`px|eJ(It)K^9y)P(2cVj6~&WU89VSy<}F*U)Qd9wF?ye9=;GvK5c0JTbwf9C;xlGxe# zg9vmA@bBQC(XIEYQ#lO-pGg%6E9;loXCSNUrAov;Iir7@u%~zBq|^-e#gNchS$inQ4hpXpI>^6z){Sa#3FywR zX_n}NhgXTc(genZQn)N#yhN(4XSB*K2dodaUZ5y(uZQabEJvWwTe8@oDL+hVnT%OC z`|H^A$US@LPztx<`Tkrn$#K(GM9!q~m7Se6p5ij)rrdK~aoX1^3+? zAi(-_XUwBcKY3HNQbzu}sAIZ8B4345Lq*sG6&U;%8dXVp-U8(O8Ba2IIxz3sh|E`2 z{a1XYEB16fH)%Sg zxxLxxbE?D4fF(|_Yq?Xvd|OZ?JUICLtS#&eG;j-Fjit))d^=@R897nu;6(;-1t`lc z&V%2e34BFGf^*Pv)M*Fn>A;BYA0r8yO;71WvELYnIvN&B1|7>fHSATBpP^@YHGJxE z_u@DC%0yT?R5+^Uxm>JOr5KgNN_ZK6L>o452NK;!bCDx#(36$?MctsdFBFmp&~bNp zei4_VcloDrx0azZB@)3dtU>@rCY0{UqiPb<)^qlz8-oLIi?8m}$4N<3mlqpO^b?uy zU-VH;StBAB>ff97oU_Io%|3^IBxYvD<=v93FzL@vDPX!Z4n2_&i@U)jW-qG7o`QRUhzAJq!b)=3 z7C@4WF3zvd7vZrwktfU2n!_z*M=P}+Da@Fk{g!3c3o_?^rbUx}eRP`c;r@xh7`^FE z1ls>8jJPDqYL~(Y^z`shMRa(L+A&>x(1oPWf*I)n+Q`}X4%Z1H=_62&R-wsoYgqcG zPW`E&4%Hsw1F_N&55lQ4acOUBw)ue%MUsF|na42e$d>sc$wa<-jHpyOU`l>on%~vd zdd_Od7ncYS)=cW9AHrmU_AV!QMKIHpTsye#Z{o)FE>-lT!jkLc|7kn43?P+{h@Qyw zRl2B*M>@cKel`1OGWl95>4ko^&5}7QiAtKrbYy5k_p*DC^98tAeX($IOhx1tsA8hk z=A9L9^jkqW1mK{bjWs@KJ}2=Egez{K;AL|Zp!lu8*|S1KxOONhU$f|qhOSyTHuY_{ zJ*+E?s4RdVLs;Y~TSDM2%KQRNX_JV3ZLlcXFsYh~ZPAI_RfmG7(;;KCIR-tDdiGjN z2}gf$E>bKgispS?7c6Rih*;0#dX7vciN!uh;CHPB!sK}MIBMl{f|`L`JX1ndF`IBB zqtYjHYE{uwk^DWQVj`G;9;4&om#h}X;iFpO&3j8X?6b2Zd!;@zqYOVUlX+Thg%u%& zpUmY^nuZS$XaZ2nd&p$pafdNF8BLcFp9nm@Pouu5z-^y?i-HE|FxB2Uq?0NFKoc<} 
zUbUjctKMvfi3F&jLxG2ueX6*NKlHg;VA-{*EC2S@VBwJm)f&Z`%aS3W$Ri}jLHFp< z+0KOiXqPa;gHhyGJ6}*HLNv&ddKih|hP=b;rT40c#fbUU*7R#CWU^_1ES7M4J*SoE zg{+M+fuNaoHFxi#7F@=(z=21)-|3g6k(eHRA%C&%2qd?s$c6)+efdxgqUwoEHgWj} zlnR`B?cy|O*O;J;Ihr-4I*%&~NvqWx3iHHhduS7b05DmbVyU=O?Dw0c^uiMwxI=#3k&SW(&YwH?UUwH_{Wf{#vupg;2QfHq9qU@4i&{?gXokLH0rXzbTGfu; zMv=unOrU<79sKx1>*-ci4Vv|XBgK4`tn$_@R%MCWXYEGR2auNap;+C+Nau9s?pJ~c zDKMAUO?-}iecBP+>81=C4+5ou;V4%e)spTAAjxIfgn_ghcnjSI2^M0k3-jJEIE8L4 z2k~>SFE-d!rQy5ziJ^AgeAs*W8q=?)Kpe}3{H)_JK?V~k5AMHk=1zk{g8V(*M`6;y z$OVrZahtT%Y?^3t99vaycu1uR{y4CyV>{cZ(1K3m*0z?f;!j=)PnbcXGGYcl<*6P{ zsSpjeCSf6cd=in&4la?NBUT`PbKJeZbx3%#+H@?Fju>|s*j})=eHb4i@?3)=xlnL{ z|JktSskhi;w^CyvoMEQM<9v-g8W6rQMnP9q@H9nX;HpL)|Io0c@l9fjMs*{|@` zOx_e1tI8cqM6#Inwgp0u14xB$X;IsjKLw_kfSyH|52t}Yz7i^edaYaeMXd~hVx|E~ zMxtQP_u0;I@&P;_29t34)yc35vmwTrckP<*CnchPEg8*Qx9f&aa_-c#5^sNk2= z-kA~xamjRbFLJ~I^w>f}DOzd2S*r|w2Z;D(@lWn^3x!8deJdip ziPIt3CEmkxO9#FB!OC)^iz1eB_HzGpvVHCGLr9|s)70La&EhgeBg~98OpD0O@zDxq5A7Q~N{9!2VBWI0lTQ zS+Q+g-#rBZQdeRFx`FP|g)i-XNg2s8k~mpsn-MM*J9~IW4VIE4XNa=UVbj9w&%^d_ zlZ^;@a)#@`4rR%EXe!q5G}1$Ugu~GYuEYJh!eVUB#)owaDX2W^;f0{&C2RwYBR#Gl zWV~+scOf9fAjFs+<9ZFh`zUu#;+XJ?&Q!L!S`VaAy>B(4)ZbIpMK?S24XHn9_c`ik zo7A|Z^`Yf9t1Bs6Hh7d(1$8HH$B2HMBsrngQ^Vpo+enbEn|fq+LPONZ%QmxLylpD_%&{_ngiPzVj1KYKMf40$O@5p) zoj?&D8j-c&ey(XM11oN2t~$pq_tW~Y@^hE})MX1G2vH#^L?C)jQ$NAqH@Aq+QYbGx`A{y=x}}zaJp91 zOLI#y`fuSBJHPxz5z+WwhjfUmvZbf}D@1JkB>U8uQD5f4&VeaaYSy-YnIpiqE(F31 z8;w0d$fVyKj-(8`WHc6OKBLCa3QKqG!K0Hd@xaF5OmX=jO=70p*`a_N6G|p-4=y(w zI_hNnj{SpuxWa9tyQlEOS9-z03~A=B!{5VYsuRA*no$X;v`!i|)ouu2SuY7o^$WF{O^1%H^_$NTAN;<0FIQGWQh&AT4n8v zE+c!V8v}ce4K)#r2R+J|^Il|~YyqX>+mdPJoz((%Ew4elR+2*M=bdcdc}u*ALoG)NqrKJq_zgRe9o6@Wn?u;^LU5`OFGCkG@+ zv~$0QP`5j#xvjT-5sdyZ{9|Rwtw=DWQ5C4I8vnR;)}>6RUVCtTnIvfFON9sBM3;e*&ee-FA_zix$gt@V*E)m#*Q6-o5N#{3;4Iz;>M5rDCPOFo zoGx&yA5$E&f_+bbN;(q`8VPVyMDl7(Ulzu_{9i41@O?G0V-~1^46K3M*}{h_1(~%J zzSWa}jMVwFZC=0M z<&AO%p~H4u!sfp*QNUE)H+XDji%};d%B8?Efh+!qB3F^gx*hpjwkF!}yXg`x!L(4K 
zGnhiPp}w>!pUwVIy-eDyW0e&3%WbWZAK^=LH%SW`${BkR2j4u-WGzV2)pBGjYpF%< zuA~!hKm_&nBk%QVJIp1LNcN{rj-s=Kp+WW?Ml9-w{+%A04=B$g7^2!xO3W*Q_}@`1 zCe(pt*d~YWTt9lk*ACm3mxcd))PBDJfqs>_4Q}HT1a?keEV#%qf}c4w;EvQo@>`j!fu=UxovA(8u!f z2@3I3ey}Z4ma2%{WGu372)$3*i7>wENh^6Eg{s`zmRvMfIy3hqeNW;{SSH++;TBXb z!qY<6=(gl>qs<(genbBdk@`-AXX_uZwknBlW%T>{3nQH-3;%GHMFR{5Ctmfux7G#o zweABh@F$byHKhd-Ws77-RO04>jJl=|N7Yx^a-N743D{w4SGp=U>6pKD*sYua&dxzV zP0f5X?o)q*ZLr+;v~u%R@xFv2y#k??d3YCRC(`;ZR(I6H2gb$h>Z5wL1>@U`OBWyS zva%Mw3fM1~ z7HaUF8ZF!A^YT}nxugqJDHJ*%4Zh&9+LbTasxzBmI$*V)oz203zHo2a%$@f=2Z=)j z20I2Y{2ne+!_Z28aI8JiEkKA&Nb?t9m9qytjFdGy{L1-FMUZaukbs=W1{^4!SyVP- z$&;t|G56i0@W5R!(5~yo<*5Vh+-76AXwZfZHtt_nTPxzni_xN`XCo%{KqjX z{%1f4ge6?)L(Mg^)``z!erNAQg0~MosJ5M3DHG8kg~r7)Wn9;t7K1NKDUk;NyoH^d zlZ(3R$e`ZiEMK|>D558f&BEfFIvP1NTk^h3zFoevKf)k;v5sAUDF|Z%vlJw*q@n~F zne&E30)0ioyuhueMW1=A&o5=q%#}d4JS@7+IyBy`H1Ee-G7@Iuo{n|pV=bcl3&%P< z%;~r6HyunxK{GCC#u%Y1!|CD72swIpWjeS-72JJu+!qo(dDV@(r(+VWy)*1(JNu-o z65xv@fKW@s*Ml5+)!DAif4+oIOme5?z41q zNz2U)L7af=8`h)Ey|0p-j+7cq$M@SkO~}$PACImi9eQ8nokT@o-*xkcTLNIQG0#(M zASfj~?BAbRZLh4cS=wb?(T+f9uu-UZ~T*fc7G5Xt#m+XlNNffZlj(?$u;gEp|4$( zkF_K2EcG)?&zg5Uk5*=Oq1;`kG$U#Pmm|`0)d_&_bPx#jfOHm!e%`iio#Uwcm$@7c>nt;gpR@t-DDCVV`-5*<|c} z)_XZvVV{18s;@SlR+H-ssL0*UZ&3*_tYTq^3Bj7d&17c-J(YZ@*O!H+9dZF(+no^-2O(8{FV{!EM~8S6#pzQ3 zMB0ZGUtt#M1cw8qa zTzva^d6wuTMcchfTzP?Dl^T_vv_0&o4CC$O*EDe3gP#36Givw4EMt$kD_mVv5yri* zsGMtj7S)f#k7&IL?!ZrNV+ z`sh@hYOJs~1ITzwr$KRvWK>bVT{PO)bu$v*#5EcBGE$c1@$aC4c z;2|o5`H9;T6tq|aIQCMrA`KLXmU1)NOPf;dJW+7Eem&)~;aR`X!K!pVOJ6FFdMB%I z0}%&5uYm+XA5dm)Z48+^*Y`_rsB4T};HeM0lU5f)4K=fks{M<~^)X(t z2E_$}j^mvV+tq1f9!>6YO^3{}aZI^t zitjY-y1V30hC*(N?#@WmX}J@=uXqpU2UI4e`{-Fug=03ouo)r@w>=nDN%PLEDVBdb z2?e|NE*GJ9H1}F=+va*(!;CSUyX7XD^j{_%QwYu087S_?)E=*oL!Ku)EA#jp=z8Ly?z0urM1Iv&tEo2T4DSl`!P2J4N9iElND8F=Y?0{X^F8_r$X33H`t1a#0SysxG3M;6IDG{~r0B z+%Gh}@8i9EZYmX@dv7K(qo<60u6Hywdbt}#o7|H;4nC0oN2-ws^o!cS!PlqSR<-sF z5bgZ>9$Dm$Uqjxu7`xvQ>*Mu8)3?n*{Wq9qN1iX@pQ!QirwRX!O99+cKq{cVRr+`n 
z>DRB*1l_}mBaPlK!BjIZW>Wv9TEY-O6gQSL+}l5~r^hssnQ6(slDDq#Pwbx_^b#OY z_;RgL{dy=tgh=$`(!#~V&DREp(EQXzCQld53A1FLJnC_W+K2zVg(%TwAj^9HdpP9R z6p8*ojS8S9qGVJ`&Ff%N$wfgsE)2;xf!v%)eKkM5t8k^VSAA$m;nO+-IUE%j#oqyc z%~zKKbp;tovrON9nHB?~;wy)mNOU5P3dmIafJl2Bo-K>Cv<1B5nS;G|ZE$VbvV6_? zN#3HlZ*$OYsZmq+WNYBzn&Xk^KOG4g-ygrEZBxmhziKahweUaOJMWc9bt3Qm9)gpj zqVmjS;tAf9-$>Yh`5!{*yIv-cBK=$OaA(jxWVjkl7_J!Azt;HI{QU6;1AUR%d>~!@ zr?%7|Z~XoVP}?uny`wJEseicde_FykAWt<&8Tl^oZ$#%m-k5&2YL!zj^#CzIEp!$C!{QhOU_QCh0I9*?s6aB~h{q`YWj9)!9 z{Me`(6#qT+Kj+}r#qtBdcwqKs%zu0JFOz{U59kn~8Vb6k{+DsKQUX%7^DDg?{~r2Z zNkAV^>5>35$7puj=l?I`{2&0#bzMhv(eIDxU#94{S0veio5c4#o_78(<3w@=C_;|G zk9GgSiTGta|N09t0&bG-Vzq4Vzm4;M2JzdX|IZ+Pe^UNu5dSlXzipcTGl>6R8ARs1 zRZtPV3-UlhQPhZ`=QC^))@X=8b*wbCyA~EvJwq9u!0QORh5ECrE8C@J{YXlI=h_t( z(a85t25cq@UN6);#}HyaNcfLajb;ZQgFJd4Xh?C=7K~y7Xx>{40j;2~-iJfCe48;- z1l?tUPJhFNM;l4UW z0mPxv2Ee1`nbc2+?IufZq|43vDC}-Rem(>`baak401z8E2jv zgwITKu2r&?&LOtmo2I|Gv!tX7v~)=b9O&-~BkB0Yj6VW3AJBYxUIBC}8Lg0ZwHj&s zWR&_JqFcZz{4rH@8WI!FqM~;nH2mm@KaNG4&WKVYgwL16Lx>S7avrk3*dz^f(qK?g zG4|Q(^elb7-8++$@dh#$7Gyl zQOc`YsUkiGs^4UZ;$)n5M$N%&fnQ~!+p8c?9mg|jtghDCV1D= zeKgz&y)W7tVx+r9ua2^OPxkE8?rYx|Ia(?u0^{v>T~dlGGwp#;j8(lQ0lHundGeTi z^*rv<;>$NNZWF#b5~^z%Yoc=g)!9u%@#;0u4enWT%{%TnHYYi;XWHdoowk2yz}}2% zg}tLfzpcTK71o`bskMp)Bx|Ywt^2mUYpGDe1Y*|Da&tVcaspT_BegWasm z80;d}Sn9;r2)Eb>~qoTv3lpd?X&69z+Mi^?}C@ctI%>U#Dke~=VaT5 z4l_)-4sp-abbzLnwB{3|eXF6klJS?PB>iv^`8b<7m)^7{$PnZKRT5yQiGWI#tVGug z6DkmY%24f|=fZX)QyUuVuT%Tq*BP~mS9o4kE)6=HZnOttbDqI>IITv_X7b&BN#-85 ztIwNsh05+tvVI#9az%vWZ8<4{bL|V31^CT(4>|3Zjba@es%^}A8Bw}+lCl+&bMsn< z`+v2q>y1*nkI4{z=v?%f`zr+A^Y!)Gt5-GmIi_FvaJ(XB)iLo-EYK`qa$Q=`sTcmi zXC^l1Z72q!=6+vmHk>BWe7Pw=b~iD#`O?0nQe~+H1{I;>yA9h4ZuY+Gfab&Ceur#0 z!(mKdrh>zHN7b+P#`>Fw>3~w)0Pm7Fn_S{^uHnF|T69=rMPB7w2sBQ4&v*Z5)G9lgb)pX$q#k#imy8xk*@E{kO*FnmQFviHrb6!_gZ}TFH_ds z(*8?e@yihhar&C=O3evK8TAfz-hN1JaCx0t&ueHAL9AJDK z9=vjAxyom0k<#MR!v@|ZH!!qfttoZL<2+tB-g0*9aS-Ne+I`b>C~M7CfBmnvH5(LS?69ELT81+Pge{4K}6jVH{!M` 
ztJwQXKJeEOpPjwHaHbjq%YD7Zbv*ZrXjnGshVzGSM%z))cyg5-U!+vhm9HS$0tItXDgDEhcgVlQ!{(e@JG^D z+<82w8L7l`o0`Yq!#e0Dy9K)qbNOcxmVwSoY5KZNlAJZQ4SfT%mVXb3MOEq8ov-v- zI&5YJbuCX^Mc?}fJe+GTX}Rr|qrraBjZ4nshG}+gdX)JHNCK@bciCH>PWG$ysI^wq zSiM8v<;vcipURU{Mkbr>l-oklr=IllcZjUj0i+*v>xHgsmo}%E@St0errey}%OX;A z0hF@9V03_x0tm4jrn)XoO|1eZjRYYl=h;?DuBP-S=>383*1ST+=kfDRtM~oXB3@VF#1Q}dYr}8YB;N;2q3+l`lRUx~_Fp^;1dFaQ z&2sVZVX?Fwg~8o#C^O^Pd~tAB$?j#44C8h{2B$tMqf_O(2mqB@&FFskz4rS*l!E1; zX-Wd#!?#oLqA&M3EANZpiePCC4Q>4x4mBJG8lUe{b)4?E1yjW%p`dFwm=@#R?K_rG z3lFB=6HA~dEa_@6lx;rAkF?jM!(#i#iZCKD&qFaN&pQ19_II`39XDYA zt=3QTg55W4@JhhDb}Ky>KGj}np#;R*;8rVZb8KL`m2uX6uk_*#*Q1Q8}A~o%iNj=m5)AfH9SdIOPk=Zxs$86UZ`qADer7Sen^oW+({y? z+1J;x73_S+=o_rL=WG6_Xr_wq?o`SOmbJ}z<@2r`H>k_s_E@ylr3CaY1|>bnqB7YsE=(fcGuxXiI(-`S#h=v7rK3__&woLn zX4?`v_F@Vyd?CZg!XA0HF>lcfm1EYx-kx9BX|`r*1H;8W5xOLmZ>nj|iOY8#|ZEi5<;dY|jn=Ay(T z2U?r^VeLmGR*L%myg9qBhOj>BvW1} zDAOWt{ISQ!N=Q27wBwon;+<0t8R7iXrAz`!RMTw{;hx9^nJ-2n*>9rhnD``YA!Wlh zhVN)&&i0RMPZIdDg)ElOG_$&G>IdHxY7EV7_@cu|+{(0o2Z`MGrpR1zkHgsCO-6C@ z6{fCsx!GhxxkVpcNJrqa)UDz4*uhU+dw2wofO8yd>HPw^&NO`?lc;IZcoI4rf@9HOUf|e#+%6q zEmM7dR|5Gm#}k@%IO*{{+z$A9hAosVdGmc@DAWycQ1fYyLtBh93W%@vBxtoX3O$q8 zDoh3wAAdXdnUOT~C`){yrdD=9WujZ{wx|@#c(*_CO@qlH0E{CYF4E~Q;Jh0b-18_+ z^K^?%8R_Qmhs{ebC=DGMCYv(ghKF(*wIVoGyHF&9bl1Ga2?#spJsxZfC2OLOO1A)! zU2&65y2lA-MaxTSZ>J>o6fT=lQX=ZL>HSXw!JvN@T`0dZIvYmM#8heQ!|xQ8D2g`7Wij|O6E`~By+S~!KbBGH zx!Z`uq;LZ(xIMmw=F0afoJ6ak=VA8AH>Gq%ps)AiZ|WKfDb!S~$2LbXZM8)~iTgQe zyDLQ0)&-yYNtQODHs)PK)IKhNC_W|XlEGH4M;)FShdGKE5ta0-AcZx-l7bIE(H~I! 
z&}N`TU8V}w+Do|n{Aju&r=~}TG5(B*O3nqzJ^M3^LWUl)mFi84X+~;@$6%p@n^_tw zGpS+jUa8Ym?|kMY4iQmG6}BlNKDk2Chw%-JE(m_sc0h9v-aqhpP2E8etZ{wcRv2j3L|DWL@L6~y^TRH+ z2Y?5gdKVc$kK7K#{|x~PZ^MeJMv2-BL1I75RJ3BDitfCvV%qFa9X^Q%q!&Drmz|WX zWm4$VHE&7*yIu3ymxoMJjR=32b!F9(n~d{ah= zQYwwcYjy}hWVnZPopL|n*5odB0VDRZV%V8mm0~rSy&{vER=1-z5LD#YF+&jPs0;8A z-a(F=3c5U8v|e=91DdR3O&Z!ny@*e@hL5iGgQnvrRSFd;k6$5E(LpZ03-3t~9@>YK zxh6n-A3QmTthHL}UF zee0zur_3b>IED0#Ibz|j>#Z*EE?PP!jt>AoF`^bbzfj(brx^!}FO9R=3faX-&4>>X zrglzoNTApkCZzvs+@)cEf+#xJi*2kt^-1JXIrTr&%I^2!SH!jivY7ROVr}0A(B)<@ z{^|$z4f;#$<_;95%Bl4i7r5g;Ni=3!dZRgy9qT`i8b92a_7vW}yejpz@AJ8GWiIZV zWwl?*9Iu4DlgZ<^{@D&w*_Ffqk6+$4I(>229jf59VSAn}i)GZ78m;)1KxFP@=V{0b z=BQgq5SLm#Nf}R-&v_eMVO;aR;fwoAg z@VB2(_~W?S@=K3iv1e{)Jg}&PsDcD5Tw>p9{S^1oig!$ToHjqwYN*y^=c-GZ)i(eE z>o$4n|0)_(zFT>Up}#zUHinqf`dsDigjhCYTW{CWC7OBM`Mbp4b6?%ZMnU=4$)fw@|@k-!eU7RF}VHm@zaU*MXJYVFY zQG_D|Ikfc1if;Mo*hKjvn^wQkxwnK{oz|YE=i4s|X;L``23NqLvK6E0)x>Cc+y{=% zB(eJ{!`Fd2P%l0&(|YGUo*G#Xo2imzZVedRZU+Y5IR|bi*Q z*)>Hx;Gzfhk%n3QmAkcFI15)z5mftbxN$!-54ZooLN?OrO@F zO2@nJc;Ssg$kL|^#$F0>!%9Krc)?F(o5@B|SWGM$>ptmZ5{TI$BR%5bGnV*Vwo_y~ z-yID2B410t9nE*hi@)1VVtG66PMS}0m_142O0V;4kaFZ)!x~@DjVfNkjM0Ua;uW>t z_xSO$&;88m_ac}OVq(1xpS#OFE$=t)6i*Q!<0KQp+i4SV-$m{?eMpVGuu#(R=S)ai z8I>ZM()NgPvYGils^y|*KAbaH>AeY=4MhItn#pM!+R|gf$JhW(N|l0Z5cx0En_>kK zD85zxnE5OsP@W2oyxeu=-)<9i-y<=hs%jL0a64^A8|vr* zIOs06EUXO_Zuau#|+@!2)BK(>R3`w?l8^z^oQw*_#ePZzF4^%gdFo5W%zNLs{BB ztUG3vK+~^X_MGFv9EU(D`c?c1a{Dbgst|=Vd!T#tb&cJm=(}JXo?WE4OVSUdcEruq z3+^a99#o?KK0rR)CpY9|bp&Kd2cy_`k|)MpHSX3|q4@mn-&&tahvB^oTW>DC0JV~N zJI5`j`4Rrd%!W*{6`BT@r;21lBLC@f|mM zd`v4N0mprc!DUnQsOKk7Ro@4vc~0d^uM!m7_fM6o050@rHG4_Y$B%F?1`e*Zc)Wjo zYreQ_0u|Ecfqj>J+^nk?c05fA+r-~)wxiIcZHB@=b3H+-JXon6Z=TKORbB5(_UkiQ z^x~}{kzKG{(Hiu-MQ%P? z8GJ<0dB(wdaJ3ApE&F=D&Nj2siZMZ*9q>$6I&)s<`SrFMoeUe}y%t-V=tv+o5bJ+? 
zb;Ro>Ir_qi_da`2M<$LaZtoWc{v;}1P|`=N=!Px<8ln22)dAlB+REbNf1ht&S1qVP zYcJx0uCaPoVve`T7fpf0lK(b5wUIBQ?wHzXZo6O&zN>ph&kwsLYC`;NCzw9ZCLUMB5kdjy}{=KRoJV4jrZA|K5GFb&Q@>90?urJ z2X^VE$9?U_Wvi#l9=B)Dx%aL%?tQOv5qPb@cCm5LLcS5N-oq>P<-C+ByZ5=nQBmh& zt~;&a8~fIiVhsH;SO^p`06{q38H4_=`8E{rqLE+BEr3-mJy?F7w-H9PZw!&;yXb&n z{j6gf)#w+QOii8!0v`62_VwZxlKPTx!;FsAI%_73J2>D-oP)qGN&`VuMS zOdkU!HU<6Y7Mp~2DHFd4<_z0kT@h5bxkmf^A1Lzz`N}@)I_NHOO2bf9On_M2UAcJZN-bA&Me> zriJgX*^f#qsM=P)B!dPtf(Ez^@0EKfm2?P(;~!U@B-gBVEwvc$%Y5GD zkm7=^MCHK^V$VQH*eTD6U%d-;*&iA5)hV~- zYG`WjFA1Xm%M2g)qG( z72LIB@4eF$x6$T(4e{)Ui57$GUTMF9#t@bk8o%;EqkBc+Z+O)9aQjIZf;dGoFq6xd z))%`4*UTDJ+Z}_~2gC^c#!o=xjd!_YST2rs>1#A_)UqcqxBx<(&*ojEGl<4ptyh3= z8y32X9$y8S{R@1B`~SdKI+zGa3OVb{zL4*Fu5#``E0` zJy72e%;xC9w98EX#~JcOdTrM7x~w7MZ^Iw*fxOi0U{`2kBNcRf!wSz*&3hutvWN$*7v|rZQ4t(>YGUqAhf$$2S6rm-k|z)4+M$* zzk$l!T%ylTYM=I4U4=(Y=cxwMl(%mJ`Unn% z(TIu<9G9f8Fku^I-n5NBUyO0Q!x;C6db6nbz4Pvht0rFWoCQH!3Hw+S*CP+!DnmJ6 z@LNQ)a1-vSnNQR+C44q|Y)-$u+gAbzY!*vdxV##^91NTSq)z+IJmZ7{Sfg+h=xOKcZYnh&(96&6Xg`2+yh@(5NyUI*7S(O9 zxG({bI1r6!iR&L0dLVfr=(#-oC3XXN9YLukaN7TiPKjm$(_2Otbu!97lJLVJ+33W- zj~{U2lxY!CI$W@dR0U=GjGR2fMsna$%HsYq?6*Ba3Mhf(B~gR)NT?~z#R6eXWB4G! 
zmFKh|?WJZ9mry0HZf`(fWfpMDB@p!1A~8-<2l1ng{_KiI4ob6fLlzmDoQa}AC#{1~ zW?t`jt7b~cpFbmfJqUDspC0ySZXeYf&U9K3nilhaYds1>Axx>J?YG6-Sj}LvCrZYl zs7EOymNkf{#6BS4{wO>lF*%HC&Dmo(=eW+}3^)MH=7DvHG>5D8Xo+xxSqh1<19y}@ z^8zKb__~9mv*J4YBb0Jh>i__OTQNJpo_<3dV6s)#lW1FLda+bpb+s+QJ@n0f8ZdBjoVzv6^f;kutT0=98RS2C_=)N5sz|4-;@5D7V3L^je{9Sn&_C)@Al>b!$n4)be*f2V zFruZgZn$MPch0HROA)0$+FvE(aoBwd1fUcO?CPX6&c_mjDJ#zw#cUmr89hnBHOVuF z{c4Xp)7vV*u*>Qf&t0K>!B62NY0|6Yrkl1j_>*`RlUD#cI!e4Ce9VD%yg;tRAxFqh z^T zEYO~-fadrUtus)Z6m5t(BJ((G)Pr4f232zB94|mVu^0^ zZFqZX8k3|$5;Uz9g1AxeA>k(}lIaJeuXwQ%lG-)Nf@Z8mX&24+jm{;f+)?~TywMSV zOVen$zZrksF2+p%s<(<#j3}5@i%%KB8?@t8O%AEga_p7t*Ro@TGr_zYHb_<^pXwdh z2wU4rD$9Wzr<>P|V=dIgqS2F*ht4hgt)+LiY1;{sF7?&1yKI zPmp8p#*!mVt*}O;S`m&yDYDsdxO$z8S!sxJ-KrX%M}@KaubWpeSHOuJs$`9DD9Hm_k0F4~#5n~ZZ?&JxWg%&Bo=M!8&7!mn`TZwU{=opl zb>`GGNjCpaUf;3IO3y4E>wdky;8oZiN++opmK@GnNY~_Bxpyes*h39Y({{by55WZd z9wMivGJ9$q-DW~htm+z&Yr5@qHnTqJ^5%Q*k&E@buPmWzMJlq(PZtrRG|-qQ4Nm(d zS{_H!Q~VFh+Pz+4_2m z-*Z1LJW`c&D_4K4!3bBgslxjvm(}9%{`34m28Y~Cgy|GZry7MAKJb?74ZYv?2DTDv zCHxNwh2-aN!sszl&*5MCg*Zxp0FhF+&f6kQz`|QxkYSFn*$DxcAS4DX3(C1sof>yl=H6}0i+3%2n-y4r#L;- z7w@VjELD597W;eKpiF&x>-Dfc96CuY=tG=dmo|yK$$lr)+ma^A=qJ4+VJ6Vn{=`YX ze6b*1lM&e!F-dPuEiaXqWxaTRqwk1-XD_bag_iWN{H;5Q!v?(e8lrF)dL01qZaq)r zvHj4NnlhLydiDEcLl8vCL&eQ2qi{Ev`NP&E3Fs6dsd-|PB{12f{?BwESwgVka}*=n0abUNW^Um`eR#9*?Vx32-Z&y6}E@L+GP` z#i&DOJq7vH@^tuVyl_amOpgaxepQZ8!Was#x|MEk0J=@*@=LIpsgQ9!m?QT%+f2cr zj6}Lk1tk3p3Q>!_*TD>|UYFjpO;-j0t`z-NLscybla36i#}Jgu5C-nj)BQ1veFpGT zVVEh2nAnxQ-~YbQ;#JOnb>ClQ|McAHsd$oX96SuT%L#pYv~MF- zNM6Az)^BoESn|AkQC|6QaYeiMwtIT-rm#i}%W58?^l#t*LBbzc`jPy@e_7Y@`S6e> zAI|n}eBA(KLZ-~K?k7&Nd86-Xqp1_qOu=%g{I@L92-=~-8b!u0i}d$gQJI>RLXlHz z_c+}Nf?@!KqaTT>b(b`LV>ne4K~W*!{p73pL#ly?ssO|tnqLBS5Oz1rzzjENwQZw8tkSpuXtXIpH0_uMhP+?LCwleW;C?GhxHPh^7;%_hN;~nYIJnnH zIGTH3!g#HjXuXyzV!m4*wVr`#6miX%+RmHl_3K2vbZr4pX?+ARKkt2`#81m+F_Zvk zr}gPRfRfoQ$J{3@mk;f3EKkd;Z0AunBI=e*a(!Op=;xw5NCVio?V%`r^k8M z6oM#A0jRTuj0W>x8(J_049v{dDOlq^&Qc4s0y5orU3&j6*9&X?X8lj4hGqYa6{P85 
zzp7BBZf~!=*uYOYb9QFB7wa>{SFQ;nn% z(W2YuUsq1S#PFomY5iH%Yl;3(Z9@(o=5_z*mXQe;o{DD41%H4y+e6J_6X+ueoneL#o~L>7PjfQbA?&sc)}dzj!+&1oTi7ZK zc^X|477Q`!vyJyysW9oa^O|`bQv1xeiLA6I_lI65YZ4y&Cxr^X0IFIt=T*(``i9K& z;Jk(Py(-BM!epCX8xjQ2q>`|vj40eyvd$viD5 zqS0t^@ckT3KQ#c74*JX)0ZDwrh#`7J-Jnl-TJA2kF50&Y{ zP>Ad7ZuOQco$8emkX1>zY?>VeI}Mz8<62g;Xm!Cx$G_;bnA<9Q|2o(62XvlQ>vkF- zqWmRulCZq_pz#QXQ?%)}gg2Wi0>JtBFKzQE2Gfgzd4U6AQMf)0wI7E*?x;ZSDt(}g z)vry5oH5GEKrOUL*o-WD`fa}Jb7?PAl8LDvO=nN_vRPe1wUygIgiedgvagdp#w!5s z1Di%m%JzDGuJ0t7p7VK3Q6nf>>kmE6ND|mZ()7_>|I8?95(fid zJ{;dRU)GCd_Q^DJ!Sy=$uVQk(6hwAASCGyb| zaKtmu7N$cR2O7gx)NPNJe8!%#Q3p?{V(`=U9yR)kEpq{cYD^t0pUqgkHQ)vjG~M0; zNXS7zrCq59-~Q4K@|R5v&jpiZhij)9{O~t4mac)dW6D+s(+h6*T_b-%X6Z9@VCfoL z8odb1qjXE4=5gPSf6JgU)5-^`ABCGq-)47kT`F&V1hJYU6FBWXDjra}Sh>NDIGV|2 zuH&+vv++3o`leP6l5pG_qS39h!8UEuJB6}VD>BphfDY*NiMJ{07c~^ne)*lO6|nkr z)1Qqrx>yzYs1Mru(e)_vd>FYKJ6I+1+&w5MzZgH@upCpqa@d<9tn$8*P3L?tza!+U zJDH}q`V#`p04-TqoT-6?|M^AxutY_fN?-;)icw7;r8uOJ=fPEQaHR+v@miA)J{=sE zYU00uq!RGAwTnZJs_ERP!u0+7_or65O8{f= z#d$;9gWcB^6E1gW+>=gkv-*L8wXns!z{fZO4oRS_QTjMDFJkPkX>ef6blH>*z#X`} zrvA*&LOkwNk|+FpN**nX0)KN?i6|?`D#KW3rE%#x?9-S4<80DG48FR3n2Zx*W zz1@7Bm|_+`B~Y|UhPgQdD8PsE>H+{e4giLfhjIko?Tpi{vbr&f)XC%hdmrlL2q+rg zx)qSHf0jMlT*s ztyYMZUVG<(qltHqIYI!kE06l6I8(e))@)KLwqPX(fHJBVi`!&6@C&MfqXSW6{v8dF zWj`y_mAu6?efmqPCW{r$?Bgl04e~~xw$JO>b2VTq)VEu#6984|lcC@az3gD`=PQ0Y zwf8q@1R|;hGNAd7A<^dV{@@QQN-|RDK}rLnaBQ*PEN`Q7asy3{Uvt_nksY5#(uvJ; z!-0|QxO4P)r<&5Avz86@y8eBHL?U2UmO09B>ij!*jD$(FSrd%(sMXDMAd^HB51ouCwNWgc> zA3q)(`cq5)=B4re1~Bhw5__8tB4yL>cT9VJWz$A|S>82jc%#Y`m(xOqs{a1Tx(F;; zwp{VvOnHFOLoKBPVBLpKScIFvp%SK;K<(TMvYQH`NM5U(1Dx*nD{z{Tb})U`BwF z2?2~f-UI#?l-P`VNm)Eqavis|YLZOgc%^OYH)UHwPM2BxOAqBqH2Qk+wJ|9*2(V}M zq!{qFLXIMWxorkm_deoX%aP4b5^_aZ%~q+>j8`dgmTQX1w8?$G>F zq*)?pVp)|XyH0DJGMLl*(R!_rh4IH~mv<^M*@z(p;)lqR=>H%N7-FRH`jj;s){IqY zlNTE2ni(-9>MM)(+~3HVP0n2a?h_6?g^yB*WKt;u>G^;kg-Vl#3!n?nTKMj|nGA*R zvxzV3K2n3{MzPxWS_r$X%J3JBG3t!^71W!{FoSbL>uCrTs_V`Q$46^I6N}Sdy&K~q 
z_Ht@(C0K)GxETOR4@zQGTuRMy(8czYT#KjliE)TP-**uhk_!7Lk)5j{lGo&!o+rL- ze#C(?KVc!mJ_dgHWUq$XMTn{p!9IehfRRp|%oYtUz zzL4#a9phjI=kd4o%Wgn0_k1D@8Fx&s9mZqPPSDT`trzgHF5X+6o&m~-m-~zFwubc4 z;FStwyN@Qqgr0P4PjByXT>!#PCzhBv8(ncSV1OH%ESxfOm6WU?S`O$xF}!%?cX*-? zZFwouto43*DG*`NK4)1d;Cm}{8Z!IAb>;i1AaKs5{TfJLq+gdEBSYJ^7o!;K$>d2s zw^vp3dt7jl1OpiH-c?R=^;UV`4U%TEl#8=glm+l8)0KTxvNfjtiF%P+vQ(3&i)j6< z)b`b6+IH@KhXUk|G9Cj3@(FrF21?``NLz_1lh^vOon9$E|K8>TI)zF00cBiKleSxt z6vn6dAp1@EkK@DizjdYGyi>}&BgsFQY0xF|`tFpaP!JS`1Tj4!8@s!CH{kScGP#^e zK5HYB3iul#>i7wiVj*!`v$cx;knsYw22X3ynU74IY|% zd>0dXZ}Sxg<+Z5Nn005%jN@^qzKal!>$2HquuHO}QmjZkQ>BMP-7Z|i&gN*_0wT9~ zQ+U|h`FK;|kDveUp}<8Y8JY&!>?k0_J~li)@VvIEFl50)%ExJ5OD2`G)cM1-tSt3# z*(y17&sbiH9|E4&>SE6mOpS;V&R{R{LD|+hkJ)KBb_~+QV7Nyh(W*8AyzbdG8Ylt) zORH~3JjQBc($8S;A8biT|Lm6kgJHpk3Ts7}vRN=O7wW518_c~YsYWSQ>2fSqe38y# z-O88)NXTeL)8+{pXFl9a<88RqfuQVS9s&0rYRboCDsnIVq5Ub7C`7zufDu+5R&T7n z*G5$3@-8c^zZ)Xnk%NE7tJ_NNE}<&Re$M2j`*4yHRe&Tn;r!*RO11(izlUaz1^q#7OPU5 zr7AH1X{NDnnbrNO-qx0LoHr!sa(s-wh}CU;EZi>O#_?%?w#qYW3lKc6GCzIe|~m!OWqixOEMDu@$2Rb>Rxa1* zIVoLe)%dcc>a_Jl3Ya=-T!~{@dMP;#qB+cHlrMf}^AjlHe|Hc;_i<3;u~v3*UFuhU zMhZ~w=A}s!DxWCvYC0Y1+j6jtetqW=ODsgQ$7!Q6E5g+u5EOR7Bm3=0Cg_&qg*n>7 zcgswh#X2z`%}e+>5%a#ln==HksM*jykNSt^#@sT!hOcd!k_J?Yw}S)b+Kr|T*;ySk zVU%UP27cuUeiJo1omAqIHNY{lbNAWq08dch)o8g;O6i1x%g%(NV#(cUx!=_8-94c1 z?WnY8zNXyhRm1CE$(qUY~=SV}XmyK49nhy7^?hKF169AY<_|+XP zkn>bojKe$<#_V1ueLJjee)M=EB9M4#`Iy(fr@uN zRHzEPIBEW^n8a?UCdF!Yj{J>qSHSLk(cSf6sajKi+T@nkbTCnSh0s~IRyKBe@^+tA zBfp|mDCTm1nlFh3#}-G90{+KESIwi(9jc5ZW+rPY#tQyaIYa$h=SM|c=qnw(b(D>C zTc1~)c3OZw(i23d@MsGFL8pg3*X=(&G{vyMdWMpettS@HUAPfnaq?fY#sjqGrw15JU2pcGGJlUQ&E=PQjLXo2kAHO8@HLt3c20D z9cOP^SRRr)<^|61zQD`MEFP~R&u=RsFi`m$pA`PN1K$T@Yc*~N=tpX~o|(-qCF>Ec z*|8ccNSvScW>%{NRD*6mxP;ogFSd~ORlLCt)j27nUh`r+@{Ex{=)*R(swunGfily< z85ZN@$pn$_v7o9y^X0mPQ~TP6=Np)*+nCHjmak~*!4f4ayK*TH1=FQZIQ+^}(hAAd zk*4k}xVN)7<9^pJxUzR%e zVvt6dIyjVywiIG9f~f9L3C4a}F7Y>*MmM@a%wwuUy*`|$E^li&Oz~<#1^lh`swFXZ 
zrY=Btc9l7WUordyakv-qP=e#yCgrXNc#!g^s}~6c&PTA}!D!`>P?OaS9x;TKmK+ax zpxk74_$aohgyskVuMgU`y~ikRhDm6jVs2JzE2zSqYZEf8(LtybQhd~3R+1oS9-S&( zHeKSn*j-?JA%wf*U>9!Ae0R0F>;pIP9UD)71Q(M<5}wlj%I7^J=zBgJz34tOvPo!9 zc+qhxZ-~G(5_|VM!j0;GoKQ>OkA&3`(a8;@cx>O0Tz$& zxxJn$h0@NWv_-psiPpC(?${1)zjWU5bQ zYS57~)UYOC<2?P$1qRm|w~Xmy@3=W_hxpAmY43TfbCxJFWRf-SV)pcLZoaXcHaV z=pux3;P7A5(;Zmz9#J<0O&e>x2%$5xFKQ9MF;w-g!DS ziUX!$J)|GaKOTN1PlVjrm3^Hbm(3il*+$pk`>KF>l6rG(^m~qP8^7&*7HS-b9)t%9 z#tI^Lew<2@qlq5FphvD23^K-V^k$Ef$6sJ!5u#-PKgvpjQMd?pF| zHUV6nf&?Y>^h?r~R11@OtTB65W}hce`83ve$^$rMa-7i3f*8pQI9RrNL?pCezej*A zfO1VF%pbVA1k07x&%953(*7Mj9NaJybiRhHV)zCQhH>paV?>4~O zlLc{=#Ti}O`GH~a+#T_-b_H715DL&>N{wl9_}n#c9AF)CuHca0!27XO0&^#8eQVj= z9ng~=F5Qsb^BVfbW;RCvn9kH!;+~I{T3m9;3bVtqo}=!sY?#*z52H;GUtNf!1|W}5 z0vtfM&WfXCC}Z=~z4m9puC3))g}&YAz8|fk%>EfyM8pZtxF@#b_f+&@_T!gS>iirx zyzX(hP5sCY8?f2VNO&Q~Qb+pP=VCAp%*<|lK7AT`dhfrw(KH;TwDa3fPn#*KxGK%G zUR>yvpXXT@LExBJF|o(#g7<2TXAI!+r`DI$y5o?ibhu93{Q2h@)VZ`cpTOtQqLYW2b6-$KYpS-+2``&aTITpxnz=BX%IGP7=AK=0Ek3(-#r)j}g zNe;G0RjiGrXU3_CUGFE~-c}k)FTby+8 zK52*7{-+}x;&kx*FD7qDbymH{Cu+r_rM0N9xWS3B)#2d^cvA`H3iug9@hdmcTU>`+ z3w+keaQfME^|&~Jpitgdd8;p@?D#v51%3+X>mf*nuHp&TgaFPO5M=iwm-3^6_tUM; zcj+-ldZ-sZ)StoXcU8i}@iesPDB{jI;(OTYSQdL~cHwp10uf$!$xBMR7SZE3$1Nw* zh1d7g*W+myT=hKg?2WKh^IIi~Xwrn6At zP$#nXmtdWrE2SP&oCjJ**GIYCudViW)TXo?Hj=N9J2g98k8Y;5Ci~S(?Dec|YE;J0 z_Q#P%ukdK6^nq;O*@iVPC}`%#m6lS6nhPn+rT3Z$ssobfR14wZSdQKY6sC5z33eWX zS4!&eE}=b}B}9Fsu{bbq>nfL8733gR9S<=yknI=9Igs!?b%QQ!_5t39LF%*m8qe2) zCm*nZjRGnHv0Mg+%y!!JJ1f^~i%@%rM{EKQ{^@5X1iFrzwpG83x*7Y7?pV1|384zj z?W3Qo+OTv9981nHs$0vjO;^UtNqS^sN-7g!dO`P*_TpkZ$;IKAQHa-a}#N~qrfwB7C%9UEK{hU3 z;Khz_YjjsdxdaybzTsFOSN;C*a!|_;ngmVKH?5rJs*5Ayv-2DfiLUZ;#=F3yD=z`! 
zd~2mL0xQe3dC1`hwc!NyQL>P8Hn?SnPDo@#028dUH9deG;oplDj?Io9)Ta!nasNXl@h)VDA%JuTeuEcrmki@VYLr8 z)fj$Z+AiiQ@lMSDW5?x@kRhad2V=Zy#mEuR6Xp2|&aj(pT+FHpePeKAU!2;e=pWP$ zql)5o)WEVac$2nge&U<_Wl)Bqzoc1m0nnF}>&vC(7J@w8Ps0zhZU5vK%>({6C$~WS$R^tHbG-xkNpP$7bmR0v<_J5eOj=%~24js~7 z8}Lv^tb8_3OS)!qD*$%USvSyccBko_vQrzz4`TW80(+T<-(a+G3$#!7S}jz|RJ6PL z&Ao}Soltdl&bs2m=@oMOSvzy5-kI5P%5SqNMJ<0`uZg#Nx@=z4s1t?3kMn4s?%J{6 zr2Gf?1eTK^j1hTadsJV=r(J9-gA`tMT2-7=bttuviATEMRmT^}%H@r=JCxf+;>_&u z#hMQ%h?p-9>_zxP_@Y?y7kAt~Mp4t%Xn~gp%tIkILtN(Hx4oU0%xW?m+~(WW4f+M=)jSV@80WU2UQ*CpKD67dL!(|s zrS#EqNX(->1PO4ad9o08tfRV3^-6t2fiJ7xVwfWKR*F~qn$jnVOv;^9lmZO&r-tJJ zK>!bkU}?+!ZvHLUJ)mb7p*mIq`~FJlvmoX9awu?oO|dxsQ(cba1moit!^lg)seMO4 zh(r}6RZF1j10a)?ZO&vLThO6&F&R1^a6mSqwP(J~GorTrI>fkiLhsw5*_}wurXnA> zAg`)BTbU+jDhXb7~NJqzJ%VRm9YEamVk=#Dy$$<8YFqdZB94W9Kn|_6F z#W~BFQ8*7s*#D!ss_ z_=*K_I5hjoW13Ql=sJCSLQ%dyo`0j1%d@{T7u=(h-29W78B}jn{b*Rsw#NNxZwEJa z*k*4zNO+lL?}`p=67j3;b+^EeKNZy9d%f@)2ERx#N;R|xm<7f(ky-ZS zICpgm-y3H{8-;hhRHC9sZx+M>T%>^hGhvsN4K^S#uEqml;7ybi@moi3KYQ#>Dorqu zXD+19NPd85v2Bvl2;bFly@Qu=nM>o!(Sbi8W@b4-xCWoGMAs3L;B!(y4QRrZgu9C5oBcqt;G> z3^U`c>gp70AgVE@?NXIKCr#yDdE0cxQgDx8gk@(>5>6!UvPpPP05VZ>sM#qC=GY8z ztlYY`e=Dh=_BSjU$Q0d|UfOsgPwSXnb!T}9pun`f#o7IIFZZyfwsHZ*!3|Gfz#0xI zop;^tWPE~X+jqxwEa%iO--#cuD==dL<645vg@JW_H)`CrahFV4eWtc<$SuCM9BSCz z^3bq%#0sP2vmB+e&FnSR5xb-K}R}Lgy^9!L9wBBQp0Y-zb zx<5X3R>6SEkm-tGri7jbOk#aR2#S>Hqaxc)aloxJY>Bvc7@iT9+VJNws8#M{v7^Tc z>s@H<8YM+(kAZGbi>LKnovulo$mk^Ubkf|jgSC%speE-(A^?kUM>Ee=Qg>tz9*~-&AU)xn~pQ|&n*rGQP zcR-6QViIum8zfzvtSTB>vckHr4|Mm}PjA`o81t%o=8w~V%)lq_+lO+xE zZ(|9dC&-bblvs^NBJW~0p@s=rGL`xi*3a2XO;9=Tr>^ZEO0c1^`6qKa<$^nC^(p790 zaDX=LJ96rz)xo)NEG{YlY72HYR&nG``8uCXpc>-c29j?i;8w4*|_jOCx z1iF&joov!G6nVT3$;wq%C~-vNNJ)ezkT{5PFTC#O{Tzmy74s2OiY;3{5QieyYQ(AP z<2-*#qJQ5R?GQ4fR5-jaaBF=a7`(}~j7MUBekbjFZ&}F!UfJYriGN#r%CE6_Des`udoySDfa?uRkz0I;5E`U$d zs+1dZD{J8w4EJUY7Y|Dj0<*Y&OwTdE8#7GJ2f&Pm_XtMolP%{v;gzhmoBV*J@)Dfh zi+g6Hw2KC#q#GToPPTbX1LfGQa+X2twY~h_HZOJPa?2O;?z9b;iDeD>b(Bf68+);D 
zCT2eW!2bh!D9|{^8D4wq^VMq-se3tx@`g(pMBgd4ct2517Hg<6O>1xX_YHi1?;&9n z88zw2f9Y?Skve&NdVSq-)!)O^Ho62K?l)QC%OFt7rVoHNeFIgPe(AQWez$wP^7f6j zdNkST=no?I>GQ?v7!>fw$$CHL;2`9eX+~|sX>`z7;?&6c6ioS2QRUr&{>SLb@b@L= z9J8%X^+Q(S*jcF{FHH+P@#uOw&9a_Q1>Fm0u5-drnsj(hLj0hu=P;TbuTTjYV(^#H zc7fDi+JYFOQxJ$n|8XcNEVk!Idv1xASaQMVzUa`M%93fjk#wdQLA6px)wj%j7*@Ey zUn}^3UTgT5uswv;H@#@_oiGq2K;oNU2!(DqE{3$Lfo<-!*{pKcbmmu>iVn%>M*!HS zD{o~xzSk}pYO~o80*7f%_HBq=9{+T%04z+u&Uf>WfY@#Il%PT#mv)pBfR&!}_&WR@ ztp;_JKDwp*t2NKHrWdFqtq*Xr)qHr;t==A{O{Wl@GIuw(J)HT)wNfiEU zpT^kmUI1uR@`~ZnCulz`HFo(d_F_9!e|m2o86LQ>Ev*)yl1wI6eKvfw%RLtM}$$xE$MD_E7dg^*iN?xpk3P+w=UM) zirEu<3{T<`1@V`*IvGR={hDPeOpPV<9Ci?MRTdEP2~*mBisTD@wS_`}$z{2U$l~aG zYnD;jZxWH(Q0VD=Mjh2FS%A3V+|A{_O1hLu8V{7oMPX zEdwa_1=P0;kjasq_X?C_G!g=Y{B#S%<~43xgRXgfro10ciVOMQ2PqgD332Mxtp>at zg&C(wSbzaJ8u)R|mUw*&8;G3Zj{f$R@%KvZip0X8AP88Ay)(aT?z%N0q^^=Aqrxro z@HVdxcEbd#GOFc7F`I68LG8Z3Qu;>xLA3VT*Bow_*jIV?VPXCB(SNmZce!K=LC{qU zyZ|Z&$*+4!o-V)QL$dp?cwZ6b%W!Hq9E#;weoCsrHjR?2-$>!~8xS&!?M6$zK|)^& zO@`v47^J$4Hjn5LZn^jP>*Z$2aM9<1f-KjPYssz+%w)oG-)0V?r1gN5pJ0VuOM9F< zB!*&Pg1xgfRarw?8y>Q2b$6I^HOPucP%3AWh_pE8V5@yI2~XAXz@ue3_P#41ou2Qv zoZ4(WhVzwbuBZ$gE%0=1Vm`p`HIF9$gI&v>|L&xA71ZbnBBM3-l8w*c&{lyYyA zpIR=P*K4~TMh*gE$BI4POa6JCx79MtHePhpKQs`ba3cnDn0JAdK5? 
zdGvdku%7>nHCOc65Y@#73uPL#Rz&)B7g`vtS_h7S`fxDfhMX56Vt3lo^w9-~Hp=pS z`2#gPoJPQ*L-dPu@{n9WtU?*cV)R018BS#C4ACaCCeHCxjeq zb@DKP0`^OLoV#l{6wIBSObYv$o0`K}9s6n=tePEs3&300|8>H}`)3QYV(M-*wlFW$ zp-&K{6QhUG1JW^x3JOi)$jNE2o1vQBEEnx+;^6}DWO1q2h6nc{NLIJK2tK}d=R=0` z{ucOE;Q`*yG2NGCxZ&#n$pVaU*Oe*5E{ed#!ff*<#jzCrw>fRT*Ge|ZC4+;y|2^s z)FQ-~rg?D@LjPtTOj=!E_r+v zf~7gU1a{$T1%flDI~^(3Sb%*h?f}6+XSrt;THe%4)Ol)0IF0D|96T1aGd^$%TvU| z0(X!L#LgD`CztG>{k_K&0CnpVT>bxgvcDe_Nq&>Dj=wtf{MSGJLk$J~m;-dvc$}_a z{U47gi~-z%9s%TxlKzK-{+HSMuZti5kG;2ki)sto$0ZezP!t47C8Uv*Mp5bRM(LIq z5C#}RK|ulO0fqsDLAtv|q#0o76v?4wfT8(rPkGLH&-?iUzSsK;*Om+STKiegT2I~U zxnqI0?dlBEzd(6^mo?x$2LmIpwnp?b|8J6hc^!ayb%hxJZ&Or^26|Vpyc`s>`d=iC zqYf;Uq|b|p|7D6yM8F6M_aOaUo~zSZxIyizPPCbFjJdV1BL?+}mjSs#F~>FhTJ?{~H= zK-0*Tp+oIU-?NotL{yU>KX^{|_CMbqa|L)imEz9$*f*k$Gw)`@TEB@Httu6wM%AF+ zFQ0Gz*Xjk~v$Sj^5M7bsfE@#Urj}ns+~ja7`s?b)UyA(u!{;DbgO@S%0IfPgEoHOS zGAub6+dl3JG6aY`B*(KuIs(?ewnrpdEg%ZZo-I&KRGyeC!o=gJwfLKbKA9`KFZH?< zuGK`|vIPQm=F2)V{qSa{quu3%M+77aGj+l&a>t&L9fzFn^qvolQ3k~24-UEma1m2d z+zSp+t@>CzKjzv$Z9}0BGpxKGNt2JYpo~_C0PIRR>gSv$l~3n=+paw*lA`@W4W&ds+juK~n(A0ynXK3}O9g^$I}oKtHN| zymU2csqEt0!$v5F3*0Q6jIj!`V+--y&jO73rcti@btEX)>upRoQ=g%#3Yc<`y*rO2>aGgB)oI{aq0 z65vk>=?lHwcc=j3Y1L6Kn>Q(mBB~A?S;wfo$m1AR4>o1KVB<>jC=zPEZrb>LkNwH_ z?z_YG9Quf?tq|dLc0~U7WB@T@sH)vqU^rH2Di=;w+rF*ncJUKBWLRm_NJCj9doHQf zU~ny0w>&vZ17&_@cyDZKmh5XSXo_s2)G>-qCP1sqy5|l7W+j&G{hHEpFU{sipYxv2 zQ8>gZ_IczAUQ=R?yz&h$PSE%)SHFVpRdikp7^b7$`56i-}%1D(W069{;RW4O#>aWkg z^dL;U$+PRbR`J$mjDWR)WI*F_AMU^PTOkiW##BFHAjJIRs~~mxcl#5hV3p#xv4~D? z)}|ISG|=7j^m#CyjN8zUL-153)fE}F&S9PKP1b+m#oqny6nVkp^fG$@kCssPl8f-a? 
z3{PKt5J&2?)?90XxXg`o(l#e5;AfTNx<8#vO{PB9O`3m5c=5l?`%6 zm3H52w)fEGL)$Qb!-a82^M)&42cyEOsrVHoz9IbVPd6Th2NDN=dh;Cb$rB3A)`Mex ziW|4bZ`R+T62Efi$wQ9JA%gfjjSA|kn=sopUT|XlaQzGLQQr!si?^4T*FinCyN|%q z&9@OUw{~g>Q?{Bu=IaP}Tq0D;^`G^kKQ5tLSvOxtC9c`8-C^CI zp`g$AGkvw!ho_|Ltm0$+FIUcxD(OD28`m*LeI^(sN?FKyqk7>f!}7WCi)A=)fblG_ zsbY{`2bGa(^pi>l8xyILV7U-S;1)xZoveh$kSxH~CzaqND?U)W$Q*bH_X^WrKUl7) zKt;F|!@Zo7*?p!jEP4gmj%;oA&Y8;}EhQ9A73gweHzh1g%|e8!<9M8xOznI(OpLZS z_(~@&oo0^T>xrGQnt;viOCEs(Z+61fzpL(aM4lB2S@RWX%k8H4UF_wef9!XBFK<>@ z=PO!CnPnD-o+_d!Rq#Z5digehYo!U8kb(-dc}^(Zg~?g$+h`<<)Y`}P;sv33x~{ukv#qV zym%J%G?V8tqNbpVIy6DHRI`xtTxrj+&(PYY&E=)N*$V1NU!5YIS$3wJhq9agmO0-M zVMf3KprW3>pr=mPH#X}!xfR1U2Romtl&3m94vWv0Y9eD$x`OKnp3%tK?%7P)x27T*cA1P1Dw5hBN?GC|GqSI zZAo}3M!B@~et+Xt2Jz2^Geb?yS^Zj#O??JeYE z$%N^ItZHr^4&D^@-kw;$impJGnhRgixrh;W&qq63!;6EG63xN-2PuIc3A+)=b_Eyw z3iRJ|74ZnqPG&8GZ%`YZ97mBw4bR#qSLlCJmP@M$ehTZ=c&d8*b>v$Z0=RvtkfPUD z;Ea`tZBF69P_yCJ3Q=T9tqbkg=R8?HpS!sC?8?>@A}9Lb&Q+0N4Ibh2;rxz5J=wmZ zy94>N+osiXETp`R0sdR1!BR#FVf#%kz74tAWbq~r z!6J#Ys0tQ;a>s8K>#FpH8Bw&aPTcn%KrzaX(&a;5(h3FCsH>NTnXe7BN6B1FUOiK} z^0+TaSbmFtn@Ga`(ncXx+AUOn?Ge#TJ;-gcI4BMI)SrulnzbQkmgcC|E=t5(sRuKP zsnpMDx-5(q9KePXKHl7e`MBC&Qo}QY`<=hvTk}Z)DNGee;q3d^4cUV0?LFRJe^Tgw z)-hA>*pI{%-qeDH*vN(1O-gO;`R6qPSJOW^9NE`q+bHBWc^{+^eXU1UR)cQ+IAMHW z=Dj41YDyN)7i-hH5NMV0k8(U2+1`D+uNqZDtail8*6BDGLWtcz=_LzaFPXXK>Te+> z>TljB+^SA5t@P*;4nBqKUq2pTaSf6FltW{4pS za_TOQ5-s;xEr!x#2nQ=JYr3Pw1to{cqU{Y~VMQGm6 z^r2*-oovvAM+iEfuy-1n69KL6Ba23ZJY9;xs+R`TXcs-%>B~N~sSDdv7)i}wUfT1u z*1I%M5DBcMI(xOKA{CJsv$8$)!p};Zh1iXWOHi9C;ThB0CPp2!yPxK4w>&@6%$l@a zH62OQCx^CF{`N_$r3axXD=b^3SO2Nt@zmRX7jH_ZQM0ULvy#|;;hZ2;R?NY8g*N-i>YN&0FARa!x$cm5~M>m@`nRx{$na$vc z*Eclj3u{RRn+w*E$ zEhUzE*lbesT zwo=R}wG)Ak6ar_+qE*D3JVQID*UHmgWsBkY7Uy(6BVn?(de7PkiWoN~BYdl3bf01D zs5rQ>Lp@F9)+CVS6UN|9a!4WT%h2}L?3=KI&X$LI>7RoD;E-k&GM()fT4SHPqls=8MqQ+wI%yn#<65p zP#ls7+Yl?-II}38bbPX+qU6ed&)_T>ZZbn#GWg6>Tt7}Nyr?-fhn3ad23M{%hKv$* z^F#<;DcqTEJ{c>|Zr$8lAo3}Qf|NH~q!zA-whC2nAm^1(sB(pTenfsg))T{O?W~|| 
zd{%6|c|wqjn?Mvpv*Ayh^)kF{;X5T2&%kG7^Pz8e|3rgzy?Skhv@5CjT6oIA9rDR_ zqzaHqJHN7K)G-q)fb1n&AJ#{*uvjuJu$0)CjRxPKKj1qb` z{?Cms+kJNvo?86rG>X4h;sRBI_=doEPZ zL%WB1O_26-`3XD?e&A+y?U(ub`P2^!Tmi>qzgAljDvK>j|I}!#6*Jv7r==wwXZ=+z zX<4wePA%FT)vK*rt$(nK`n=%*Mv>o|MmvOA1e6nJ^VB=HU zybyY)Vx^ZeMeZ`Gil>Vf&6aIuqx&@1`(t=DXnle~ouF^pBkKz;tNEK1; zi8>B-?mghLRsH6wK7KKK+5Au?ve;UI_ zi#s@2(91jt`}BelUDX0Uf0^~OVxzMtZquf2!=v4hD0yWO6p1$2Jmzmk{x>N#$|A1D z)Iu^wX4&f|EwgfNa_LK``fbdg`00sPkk^EzO!&JU}p!$ifdrhZlgWpobv zrX)&RnVR;B$l!^)K2zU2OJ(HJby$9*Ujo#Oe?cXxO14ccUx$}ZmA8016^oUz#bZ#j zxqchu0QYB$?bE2YFq^8rVDIT|a7y;%t+2?qU~|h=aa;X9X{-O{)PC!8I<}zg;-`;p zd`5*(Kw?JIEJ=E!yJ^ao+3H;0(FjZ|h7+n?(k-?`N>l-NiE{kfjzM^YoqOORsoHaU zu@mWHgZW3c**&_D!ut`1sitpQa>t$Qih02g$-SFg^no~zreCb1_beyBhESPy17(pq z8zPar)H~7KIb;o~D=&05m0VorrB6MDjGnGLl?VMJXl8ohLU5;u9ZD$eDV@z@qW~-F zPMl@n4|>U^&uLp|aLZON3sqaOmpIeIS1+VF7$tLJRFMch{h!@{{nJTZ}*BQI{oe2Dda?PIfbl$(m@WU#%G zi?TJoZXN}-Qsvy9lTZ(l;A=dNv_m$#cBp=c9IN_}Z)|Q4i%nZO>;0ym>Vpoy$@84F z)>9@%&Spsm+)8j3z~^W7$W;~#{z{fwi$qyqB(@=I{ks#R*heS%#+^VEZG0pb+Q{s` z)vTRYqc=4yyKTHpP%IagIx5Iz&UC0ga*ygNsn`W zOh8`sZSNX;*0GiG#aA}Wd@|QWZ0>?`*m@|Ak%gDnY_i&!ql-t@VyunpUy^+FqK@B*&Qto ztwqTRi{%lc2G7@at~Vqqol&1>Ret0jL=G{fBF4-ZIe3a1rYD=~Kpu`{_Z`_^Ukn!+ z&JaJ!?YZdXn^!HeV*#=?CBfJ7RV6?H>{3)kp4OTphWJk0p zrOrSLf(`a^&h}c+4Iw3X*Ik=34^n=+XdN)~=KZ#{WRpP+_oZ%*daup<`K)T*%hL5N z1uuLoR~)r8JeIjLxl|4s%JVCe9(~3RIgakMMdaxw0AMb|HcRT$h!J1A2 z5;kVb<@fx}$FH`J%vxgl1|FMo2;>e|sd``-CRm%3m=cpR&fqi35IWVjTHaO_B%fZnerUMXqb$5Mq6sLmlH^(JQS>Yi2YyG5Wr^3Ch&WXZCCfEk%BNHw)?J z^{+GdGdqRbLxHm;lMt(9&+(WUt}OcnLgr0f1V(6o8yryL>U7nKbS5XHtxJ&V=#`$VocNL|+S)3dlVmq9!hAJSH4>Z}FV z!3H4++E2;S9ssus#MCr*_n2%mbz^+|mg5rzz?rw7&tx4<)c)2RN|;&Ek-&ysORy#D zah`FZ*kpAe?-^-H!uUTU9?8?x-2-D#nwA^derA=T6Cc^$oI{XJRwEzBi1qZqNnZR? 
zJJaaMDVGY|Q{Qdht(v!}=`h%)avx^Uc2RB0JN4*X80t$d-k@Wu@DF2f33r})hhFZ+ z`8q3@VAJh>OTpKiAPVnYxL}t*cj1(HgR*}0a2jJ!44GTQMcS78jBp_?y_yIg>R5D$+hvr%}(gMtvomDeOdfQS%ZtIgkc+ZWlj=gC{1Cq z+_}>O50~(g@XLf4ou+95y`8e&qikHN<}U?nVmpJgqj~RfNM7-4Z*ydGj27_HuEp_) z7>DP-drw5CP}|+W=OT#gyEy48s(HH4xx2UEUzld*m3Edqx>6`rO}N)=UJ<6tBxmYY zo{O$LfT&BxeKM`Pv)JhEUG58+*eo#@eju{*;d1ZE`XdRhZ}|sGe(7WMqu464u0ku^ z25Q}6r+vG1Yup-N?po|e!mIF?8xIH5T!La|<#NcJv(4dYZ+%~{v78|#Cy@Bxaf{#O zwuRS239kAkr3IYhlsHaRdiEB%fUGkYf+im^{ObU-!!Jv!&Fb&>qfZu>(~Bs=uXY?W zb1dRCW#&s&1Us+t7lvFWH*Px#r{dtwXh0lrPS~))koLG&#Iv*^AJiR(W?55uiKD@) zeDRomE|!%<-VYmmXZDSH>6ks9k6c&R1_U>=Jp#}%2znxn5&kR^o8|pX+i77r*NzIp7ryUs(;t z^NM!*yoeSU;}+5^ba+B}g^StE?MIo_gp%Wk(n%<2L<#l@{;v-Sy!23D7VioZg)nZS znN~+Ke0visQ@*9BpLlA0?uxTrJf%vXt+)`16ZPTkKj$8OgnuQ*y2-Ym{w~D`t~gP1 zQL32pv6G`gHg9)^qJ9OpbEsYdkFim5O1!9YBImUcvT3@1PF{8mctdu7#LyL{Z#X!l zfis|!K0Renwt^|Mu!zW4Jf57l|Jv;ArriR#OXU# zz&E+d$QVWabHcv`nH0DQpzmExMm$Q1@$-}?0w!aijq4LthGM!jA-2W0s1rY^HL=}7Y z*{?#`ucs`dKqkYyt_L{(^(B+Q_Iol#xfJjpTLE8-2l$i`tJHs{W`2J&g)<<7%+0s% zGXEunTq-7o1P1}NU-SR<^hN}b!QjE;TE_npybiGcl!a;7f6f2b)8iCC26o`gY4ZOP zLLLEdk|e*Q{I4&`R0U-4f6Mv*pXFo{8FF16&P4&c`V-w7nLF~4^sxXMwg-*Mx!X8V zLH<+MnZoe_ZtOb~W$g-)PNgw2=4xGV(J-sNEFo4f4%7#>fUdrpyca`nJkMinC3?@VmInR1#k63q8 z(yYbWd~<*-NsK9&yju~I$H69&zl!{7fT4bJ)^i7Ovk%8!p_Ps{+m@@d+&nw6GoV%EX7$>BHqxt{dtKhQEcy+6U|J$`%=eQv^S5y1O3 znTh6C!kr02L??=J*^qcKengk29b{K5pk;@Dhf|>ndHT83`b2uYe|M>exwb5Y19i*s zb~3vv&y_5_mdAJnX4XZRD>1eF)#bo@zxu@OyqsHgYBdO)es;%ak+}eCw2Y1vX0uSXalZg_dAlWac9LFU` ztd>0oXQMo5KAc+jq{}LS-1t#RoD>Ql1Ig-D7IITVWh+SRE{VZuo*OnOm~}w~=kdC1 zF!BtfsjoTdg-y}eWTsR$T&oPNY=*zaJHb6R91c%OlF4+Kv^@Xh`$dK5ky5kl7yPdC zsX{q!^$0T7u3Q{}KYYJyyy-H<;YmcN<;h}#`vAAj0V{sxqrr_3+82Tl*5NwzLn>m; zwZG2XOl2nX*$pDNT%R+&c-%lfcik5M!ZzETV5_*?0JqciJtRxH2IrQuHW)G0yn zC-nCsMMwe~S+U0kOV&sUU@ShH2oI>x_qN5PAU zmsu7er#(36!(UAzxzFK0(pLlD_HH@yP6jSp^>*LkS;=6yH$^M#j! 
z?ooBRNb`NQQNFS{4rl&&=+zeBc3kXV z>g_3|URuR(lj`UlnoTZ4MQ}^KOO5!`cv}(UoCtp8J$eWh>xwb{8 zpR*3~V2|dq{k-<*=R7JNC+?4a(dgN`=g#}5fr=Kx-$1@QuAVsHs}(Kh zzx9Wfs}rwA_h_)5tT00)iR16>|7Gh2xhxolh+h|88nu$+z^* zssE$COa9Yq-D&%X{x4&kaSiY*XyqOL%dOZc0UTv=8Oh|xKR@`dDF6WhEnulL+_I?t zTA)8=3yAOkhQ$4U3s$xuY)TDv?Ttgwri52#Fae46{OQqkMu^5}p|R_X>(TBK3((j<`0( zO}ohjqB!LbL(&qP*fd7VmUOqir_Gq1c4+wobn;J>fl_R@gF^2{PY@I}Ru%RppT)Mn zSa=V+8?!KL=3n4dX*a8q2Hwhj<8=4^uT9o@D@cWr<-~ObBH;ENUl5CuoTzyg`=z}@ z875wttygHIRcQpO{T|N6A|i4h1+ciY+&(`W^HqY?Pcx||3-u+jCxqtLOifn#gMnV; zssbevJZ~Y`uyj_mkHHIl*LK@i%sW46ek^1%o|W{GocrkX zc{G1)Eym8>wijm62y{9jE@nX3)RMnfR9&?BXUT;f3{|evm;D&9VTY3fx*jSE z@G&$FY8d^QS%7MN%P;<_kic!2rJT4f=X;@z@N*pjIOg)BN0qJTZ+2EZETdL^;S@&% zPzF(PC-I5S4m_FlJ8{l6D1GkU4me181W)~?yNLtQ6v`J2R|B|n><3y_#KRXI0kcw9l9j~I6< zeYm1-+<5E?wO-brs<1Er(%#-fV8c*OY%?;Sz>}if4qRYckNY{87EQm&q88fni!X6;KtY6u5nLE z`C_lLDt9jPq%1`sr3D#>DW!D)jJBMaP-E9DSSF)d{g9DFzmpqxGQtVrBjYe6)_IDe zhC}SFNofSeZ5hfggR!M-8{2~zmmEAU!+*kZKGujQocvd zpE3Hj%j&AfPs&2IGjYjjdKM4!ljHT`Sb&GL`zhtiU3K$!$4ckeUP`@^et+)Vx|t>5 z{Ap52N*a*Zp9ODM1zQ1%<~?bhIC4@I=Ch8=e9qfY zOYUAPZr&0!DRZcF7|Lwd6(%la@+t;6{aSMs#0S6&#w_pHU#!n^%;J&cq z;wh%bz#}F8-Um;9C7Z?TD+m;U|iGdE@fbfJY*?88<-b9_xRCg__~V z_wvaKXTO;LES*zeUTt=6h>40F%X~tzo_K@3&1qu}XspYta%ioBsT|YkipidS(#x{X zQEgOqNIr2i$Gd*ojWD(5tC2l7sU4~d?Y}>^IcluB{dNK{KOFfgfW5;zxZyb#+m<9G zERvx@(wi5TT%gO{#}~bog;PO%vl}{lqdoN!l)+nlxtPmzGWKw)qk`9TUu%79U|!ri zK5fEfwgGB8sU&yp4OI{hURo6{u)FiL>#G3jmR7?D3$+jB`WgZuJ)(2(DqX7e3SXra z9&|@1AFVmMc4jAzDx-)$L`fZF-DE;Jj)`~#Gf4RkuT5m{JM1{LwB%S%j}J1h=jx`* z0?mjI-3!f@`_I~ac>t?=+szS#NgKmJ)CRuhl+!eX#(5_y3+)F%J%BE1M1%( z*Xy?%?z%R^22@f07A=86?x;S|`tpKJxiGS>KzT;tIa!bE^b*?*nBM59vBE&Z%n-H( zdJmX)?>=s}S);FFR2nv`=5qg=`Qok)M#zRvHbbEitBlRDxnNeYD_nYaI}`%**nDp! 
z?AYaa_vxG2FhujY5zvEI(uOfXU6rrVsM0~=q%H#|{{nn$&%V9x+9dq2?A1(#LF4W& zSAVKol;rN4hP@&FqoWmgA+g6w|J`cPwu|0tzb7KDYO9q4750LtKCf3E{xc8!k1GJL z1nJXkVvU-{sT-Ei9875ILGz)gxBM0ah%pV3NdprDXJXv_X;90t5hg#g$Og*A%EpUkoXx_fe zqIwuIVVziA(!Gky+^Scd`rwB|eswApxA#23GTx4$bZbl3auXa9kfe(Q=xrR%K5#?M zhtCGP#i5WtQc+B81ZjQ(^WM7Pmsy0}=SQ}M4d=%_9At6!fd$^%VN0%pRwy;VP_0*C zUcUDW0+ZfD7I%Q_m{{3xHFF3Eq3QI3!p+6O5 z)0wfX%imbXsbAzouND<=dU5${FIn`24=sMT}x0TB$v9~ zxWb#0Q`|oTswPUZkK&M!8b6T+-sSV8uT-8*KkuPsV8o{AdQ; z#MozT*U#4qEvMSLA;&DAdC#EOCzjco#R*}CGnWQP|wTs*a1s7{4GTqp5jtB zC_Y^Wa`e9Mc$g<)xiwwhD!J+JHL*k_=P_+PRNsiYT0WvP>-xz$9RSrl3x#%`6FJ5} zNTbn@&w^r1$HWDD<9`O`SvpJu6s<`~1zLqhx;ezK&N-Q-ewcB0g5qIqwOC8)^YbOV zO>@YZFb?D#AZ+VeYecL7OxE`Ya-SiMOTOu^&{oe*>W1I=0n=)c{*=G7_Ciy&E|B=o zd{6^xx<^Fag=}JstYP{k=&0M6=}aRCQ!#q2)3c4Ik~$97o#fN@C|>&P)Puccq;0RP zu1W54PR@t}>TZ{sd?cOQvKXpCzBjtZ*aZz7WOe&%U8FDxv9?RX=~m=pbP@-~IVMX= z@k?z{(g`ND*QKMY9p`4>dAls&zXI5%uX)xW`mIG;_C(6H-$yuf4p5t`3`$x-icYnK zF4?e?Zs_Kl%}kEePUq?6^TLYMlWGh@!XRh+iD;0vlHyn2gd#px<;QuKKdh0mvYJ+&SA-N$*b3CqqmOpB;EE-v?QpyD;@yy`>FaEVK&Bwbj@bLJR=e4<^; z_DA+2OS>ga#%Tw`XCyokR3}ub~q` zmuG9xlZXJ1@EK=ZH?KeyLE%8mmkefGecPplO;oM%Zp(z~pY!h@2W%lH>C8 z@(`J51HA~gLLvP5)FqFGq4CHn z@gt+Ez7gk5?MKIXvAd400vAMwXMC9*!tsJZ?TffpTV?0_B|mG7_>^znig_~<#T?&# zydmb4XtX(7ImHZK9At-WW{I2&1PPl?hu-v!)%t$xcj12Eu((_sLmY=zn!vk!4oA{i zO@{PJiwbPN)60JRZQB2in&SFr4TQmu=gaR;t93C6eJPLJ2}nO5=B!ccV!MF;%5jy= z?zP+eo%vH0KR8yJ%NKfINSaOz!t(SAzt2`yfdu?6h<==r)Z#h&ZhSb8dO*2xl1N%n z?D2d;k)98TEx@AR){9JuxK!30p!7xcYES*ex;aH1msM)_tF|&*n;C6-U-Ua~C$n=G z>Zw`VigcSMz0~rnR{^~10-ps>^RSvCp~ku4okTdKG{Qidwt+;R(`7b_mc`g5`0bW1 z=UH(!<=%QY{5alZs7`1}3RQl$c9-tAT*eh%3dn$WQ};Jk|DU%SKO3QkL=&a{h){Nd z1L-&1FL#TBwm4lI&cjDeFZ*I+4-a1voC}3qf>NhBbWbK71zRCjB4OvpCyZPQl(%oE zK3w@WBpV4`nc3qH-$yrEk5R@7f1Oo10QDr4dt|a2kKY(?c7^LY0S>-&J7&3Wr5rv= z4_hb*XbZ*J=F))O))r$|F)>NiR!ynWmv~s#uAE>1fn8wdAh20$qY@ts7i`zQ(w*}` z`3GU;om{Mtoi7yX2zaDEdiJI|vLRM=lL(l2QbxIW5a@wc+f!22K&RIQsy64z7F?!; z4zZ?*qpCl%2?F&kP5vjSCmJ;`J}dPZbzQzYCjbfTOvyZ+Zqw2B#C`pz&yb0Kif2uG 
z1NAM5O+q$-U7caDws)iK8VFB_O%=EGqnv#n@2+~?-q5jt*M>oU#-xYcwS}F@-WKcSS5g&6H%Qo%^MGUg zyaSY~KyQ|SJzAJ4-aP(h%q+HpkW8g3*9LdRK}}tf{`}>_d!pEHmnCWqO0V~4B-?1> zHrj%RF5nO-m!oNZyuCk=MR4!V8R^btdVPDdcfdeG0pZayGul{>bV!<~b;#{Q$&Q|E zzmI#3p-%R38C87EM0vQLYQ?3(zxjE03Af13elBk`12h4eI{FO`KFId%e4)Kb5Tdc$ zEPp1})LVSH8UkHBh@cXa@DJ**)kCPD7uDt;{Bc@6ej7;G%&D8lgq0aU8p-yNMWQ$D z12_6a%|3L;BhMolS*u~Wu6I!cUCBcB-!U!bjQ1a{jkz*Se(aR-7qu>#`oYDjHtn4U2ti*sL6EHn-KcFbb{#1CcsHHX5eizNo*aIZ?3K}B# zin!_m10h2h8Ev7B+)~?3Jy6k&oS$k}4ea)e6KgVxEB7S)i@_ZxEk z>*r@Dg%|v%;8pAUGe3&9hZSc7PI&SQ`kz!eSbxqsO3uL4)M^_QPJ^J?EoYWGcb zzHu!)V<%NRmYC{z^dnE|i&2FgFF@8OK#6H)D{j7j$D$IC0X%KQqk3_d;m6mV^q?|* zbDutO^Y!66MIbPdtsq`~0g~V`Zqjlo^)@L!4#R`($*`gaGTLw=MvBeVXBm$^fSf+R z>eBicy6s7Ppg6hi2e>?1l@8VzkB>3vAi4(Xa(}YoKZBejTv~jT#du(lde6b8L;+zU zKWhFe{Vq37StrCV*X0@F1V6FkYV7Jrej;9WP#zh55hd*IdP@+V-g%Qh*n06~`zPPv zd>o2FBQeR$4y`KwqfWv;y1$Wjyy@%&$N}BXdKPl*4ka>a^taiCkn)eXzR2}h3uE-N zE2Oixb>eS@>6`hquU*S-%-mfjk)+FdR7*U{nGll3_?%#71H&jP)iuopnLIi#9zhmZ zAB8?Wn1U#^`A$TLp*)s?U$DlPl}|WdR!~G z9MheNeDIo;P;eW!l?C8LsX{D7+m)E%li8AIv3uJWFU#-`!ePpZJeVUN zC%8eO(M=8@v>4rr+iprUEDxw4KWbb80 zk##lbOxCZJ;@~a5&X6liuB1*>tC$_%dx1LYut~OnNzrLeW#>PdM9+eK-x4MWp-XS8 z@)uSTiPKB_Jzh>4xigasxF%aOkEa0gyo4NhfDz%nVX&|JCZNLM+;NRzFjb$q3M31oyD|+hZVf9K!cp>gXwV;;C z8F>1LyI7y-ryl>vsa>{RdWMmIV>0SkavHsjX znJ?O>w!Y&`uegF8?yFXGZXJXsm{tkD$;Anf&5A;Vj95@jGWAzfSNiFLO(XxTneBX% zl}265R-~MZq-SWnnGAdNGslKjig0bjL`ohVF1!s^rsZ~Lhhf+E$L69V99Ww3(oY() zs$83D^XbMREi_)ATE!(SR;0IiDSPvcx1FMM^N4sGvF8fti3OLOajXp)&xH^W5O5fl z#X-C>&qVaic`yM-Pk}~fX4Q-9D%MJ$U&cSRuTLiFXz6m(W zVU`Pfb1<~4K9>9}|NLQ@h;z{apUq~@2&aX$qLql#ODOV%9#0;ZQ37XtgXQ%R#GwX| z-ox5@g&f{{+JW3DL~OO%RLwW3wrjCwG!J&%1{L?tVh<{O(MyTh*bwq# zMgdID2=%70ryEQhC+fXW8~;=#c>~46OiGon9}(~I!fA4K2IliQ5$e|`=n3A?Td?CeA<_SE+{mMx7r0xE2)l>p|`s(E8qockh2J4dan=)z0U z#4j~f9HbRa&BsjJqO3Sni~5;0zi)HzjoJqM+UCCN*pG#d_fu+)+2o``z>Tr$rSABi zW&?!-lx2Jj;p*uNMnC=`j^l+?F|88Whqf{wMXI*PnLkQ1TB|O@V;lh>tN3NZr&mk4 zb(p-Bk{{FH3lz4PJ9-;o12mVeqXovxoSQ)bKc2q@F97W~?nxW2V1Yol9_lTq7f!|V 
zeGgZ;kB3p*Q^Aw$XL&AMO5ZRYLr(XmJk)bd@&nL5L`0@Og4Ix3gZ@_+f{YvErGfw` z-Lnffqd){T@+6uYCFSli&XeA%Ny_UoU87mF{nCpJn%_rfEf>ntdwYc!{$Yxi8@NKV zr<_PTDmEvC4kiRRray$fr!_LCxkgvMox%j=Y30HV0uU1fUd(^huVtAU0EpG2h1=&v zS{e52l}r>=LG#5;oOLgQCC~%yVaEp^wy;=d(g1>#h>cXCHxa+ECBN5jDcW(NUTS6c z0ua+i!~~vMKd`7~b)Dn4>6 z70CqegP-hxKW=cgq9T_8-Sz_)&O z?Y;Ti$_WGkf0856YU&>Gx3L}<1Cv|zubQU$=^e`w5~oV~uqh<4D%TY|cq>VOx@VQV z<8^)k`zW$;`)$8c_>sNW%DgeB2Ou4uKesF(gg-A(3TLDv_-*(*%HMVd7xY5P+7U7e zRFtth3rAnx#(n_B{;h7z^!+E^;r@N}1s$!MXksgA_!}m}ExL(^1?#4PouNzDi^cP8v~d|c=_s!eiH;rs<#F=;vPO*&J#oOQ2xrexL(nAPs0 z+TrWZkpZFmk-2ogEkzIo3D8OAQ6!zzvyo&`_2Uq$iSiC35Pc52;T!mkk-sXTgX{pJ zqW8nyc|ohanP*>TG=IJ=jVPv_UXm?`#vSUW4(IyqkF$(rAujFh^zk1%t_NEI2rUI~ zm>ANpWw2$cE4EiT!A0Y5Gt`ll;hKaLR02p1bfHbB^OMKXDj#7!?(C;pr7~Lzq0BW}!-lK}TQYHJc@O@!`@;$L$}>F_7ml$x##IUwCVHU`{Qm`RQd%3R|(_7y!M%`IiBI;76 zf4H068XuU%g01-q>V87=fkZJQ_xfKp%K^}=+@L5|tA=_Z=75Gh#9O-9?>@x?p(K&H zb2z(Qjmv1lZf?dLjsd%|m(r|hgG);MCb7&USJfyQMy*`%Nsm5~Vv?GE_Ot+5VRKy5oNh!0aDdaQI)bhNooZ2PqBhxg ze!vdXD5Kx=@1`tu#Pr~Iz385#JT*}2{Pgk<3l5AUP)QMGZVO9|e97|IWABG^Dk6P6 zn8WhZP3{?^Zxx_B!De}x{r!a^2a5GHFP{95r*M(onlG!hlDsneauz+AczNAp6J!?T zPLywzy?CqhFPA8tDxtzFAJB9v6#rG6G)&+{u-O5Oflsp3i?{b+D}s{6#m7J;>7k!d zkh^P%>iqw_ALI(L6~*$89~_Y&eStEUX9bJ=>^v)vBuiZ*uEidUIh;j$4V)T?n)=kl zPp4Y?7^S{Ha8mxuL2nV6J<343NgLWJY3=Irjp*TkLTKz|iNhqm5q31YP7M89JP2&m zh4gW@xL2xF{2}c$1v%5nJUrPRX z7^gj7ukA7Y&RYR6N5y&e9uZiv$3>4|R{V z|Ki8~RkTS5GR4a>E;a=dE=9-^cD{NWl`PU8#Tr(B{M~k7C+~&|1Kq#nTpc84LiTDq zt#@+1jo43^O;!_i2&~2y2`V-P5kzvX4C7)MX_$;Zoe7)l1|(pEH41cY&pzV$dtLm8 zMO`EDDqYS z_96}QYz@=csBxHlM9+;>evtkQIFT>4q>DOFQ$V=-U9mdl zL@l0!iT)9V*ztU&;FmDQYSr(b>yw0BrF}8hO#gS<_P2|U$NYQu*bYM6)(l`YmO3x6 z^w)opc`vs;-wF`Qxx@4I_~HPPVV$aCImlxD@fbkREX5l8$8*uisrZUhn+XM3++cFDf zwe{!$TXpikl`;P-ZR#J)zGWHBME=%k2xcZ)i}G81)` z%bzs`sfhj4H2^*4qK_tB<}*7aQfT=#4a$0uYe1!r0#5E~22S$?E{>aW z?uTZs6zIY9HmdV;CMQrhWAO7p3mLNn5QY^sJ?-ac_HNUD4*Gtv0mB$=bhxjwEd+5m zYmc}atLlQo1M-|ceLb*M%EIGrvy&ff(^n{8male;Y4C)9OK9(PT&85p_foqtm!7|~ 
z>;D(SBv`0IkC?^)Fnej@(L#_|3aZG~kxXicL8WN`V55?G+&5`BC|(;c1SbSbMzI92 zdLC8-!q}i2#b>VZxuC+@DG%A&jY2Fxy*cK9-ny2IOjspZ0HnEc;QbYTdN@;*wnFNE zMnEGr|1qsCN+hkRwAZk4~ zi~&V@^n2BqKytM28uGlRY@oalmX_kRgXBx5D~ej~F=j%XIzFq-A2m&(*au6i02<k_FU8z7J--f%(47jxel4%+#GPIG2m$S-(|0Xw>-OHVYJJ}+T51H#vt^^As}Fd%1CH;~?*ku5HQp9yIE z6&I46I|PFM#Xt4q0@@6N$N3Fs7D~?=KhZ;7St1beyg6>Q4KMpjFTC*Ljh)lkmN4^) zgIB|>!J2zJ#pyekFUP=WFuh~amnAqFyDn$5%gO+0U{#(D6(Xz zXQ#C;R>5JZTRP^*eD%serWWCF;}7i&@wkffg2*hz8y$}Cmvz}~PBa(F@A)jda*g?&epAgrmSUG8i`B@&hDdNqPSj#uR1v9lK+W#d#DVi+KE}(Y zYJa)`E8tc99M9}!o3Hu)_pW%}5KY*@0C@R4rQFAu*&POSAu+o@o_$1SarV&f-K2Ay zz02T7!FJ#a?`ObI4K`5js*O7y%P0HiW z_h>f9seCueBv!)z!YThq6Z>yp>6ZljIxD{BNMmoSyA9@+1 zMumB&d>BHMBADO|=2J7QuWCHwg{_cY>iEk5{_hiHl{)kXldi9C{VH1oAWc}&oiJQ= zJg)fFudcF5b(zS6W2d}j4^2YMW04Qy5{ORg+>pO#B|51_`Sue6IAPYyfb53vC+qM6 zm#lYU-W$msEcboPcSm+k=o~vX12|#A> zBi%y>X?s?8#~!F1+iT(*N~JJP{mNooT zB5pO`Bm8#6K_LMu8OWfvrj5)y@fmTgM(H}B@>7Yu`LPCU&+By}Wjxj|vbK%j)^}9y zn=IqKsoYN7Wzqx)vq`^E`X+LM?$N-6qUhuH3_L(onLEk@Ez;gjAr{#ZLP5y!FA13$ zIV5ylKD!ADkmi(jh=y_#s|9r;S(Ajm=9{0*465Es=<}Ov z)GD_cfD}%xIF({{5!{@qzM^WiGya|l)$Wf7u_t{6b|D>sGC#*MD?}?r zTUs8e-gI06FY)Js6>S=LFlUkaUTHepnc#%6{a3r1;6UYs$68OrPBh$pv(Q_ir8_!O zpR;3e|FgUNj}k!Q6|wZBQy0C6M`u!vcu0)xeC7k7E9U%dT}Vuk%S{d})^qFa9NP8l z1m#G?cL>bA7s>*<-Ow*q7J;wUuds11wb!e1@QXADWQQUbw?7Df%}*9^@O{0%54TJy z)oL*~Ua`sd{1M5lXue#KNgdqhaaQ8>w!jD*rvY?5WF+*s|GMcVm~im{sWt)k@k3L~ z{@Zc1p|<8)_T}nhrA@3W#uYvlr)O>R_g3=P4!1H!Cbc~LHbx0{m*wJlR_e-l+(c@? 
zKr3d=kiv^5yyR6eolc=Nbz0Kz@YwHTU!?pokvjaqlR~<98t@Iq-3j8KeEbzqN^z( zpnE0F*76*JR|FFHM*w2t{Y_dU+zucXl&)6I1lM}jHe`IbdFIl$a7I|P>6XE9>K+kL zSs~twzhHDzSclK)OD@n#N2VC0O_Fb(-$uhrN}E?;EkFY zcy$>xs-v!Jf)ICUtPNj~9EE{` zVpTgJiyP=xQ)v6n5+D~h$YN1cnY051S_p=4F*EF+E9nSg*malqN!mX zGpkt!Cz~I+yUHwqV^Xwy)f%0KUg>tFLB;Tnd}u?8+2CHouVQHzZ59_A3J2(_g~%Zw zfWEsIhG874`I_RpMssdw8PNRdt(at~tupjJ+Vr61L%a&j`A47==AD1mJ!_y*GFkXC z2T;Sj4m|eA?o9=hWHkzHhTRA|)La~eMfr1Y=Xp%5k6yiNbTyd+$P>wYhYOsa_pNLn zirX&EorPuPd5>cXytZHD3WqfR;R60Vd71TtxKht;mFbsR6t3bQH$`6N2IK~*)}0!C zlgkZ5L;&f_E8|s;aM#B`(IF=FFd5H!^5*tA}5y1ys^beY5iRF*MPQr>lh@i&jD zI=sw^4`7GoTJ_)IF+CM@k-Wz4oxMnVQAaaWpHD|$Cj-Y{CR^*WO!dDRiTZ%`DCg(k zy%+PG=pU)8mjcs^AhVa>?}3JPIxUn3@F!|DyY3#q28K=0xb-qpQ z`1`DovAS3F@z-TGrRX-PZ4bl5kMkX!ZLX%dc&^HHf~}O z;|Pydl-i3HaoUn7wz(s$Rqyr4dj&0MKL2Wa2d<(WBC5Tqtc{Dsj>-(IMYi8i{Y|$< z74oI^-O1vuiD3^y1={Ag$^eq(F3N;{fdjgl|7sF$uhweNu75t^aL-AiruZN^}> zRoB@JBFDP{gImkUWF7`ICAcR3&p=K44k&U ze-{?@Z_7qX0;}?f%540rKo#4C;K8;t&p&JfaLzox0a{USa@s+E1Ec=DB57uTyTc}? z^7Ic_@NZ#W?lQ1y$JX^4!hdW-K-1&vg(g;OTgSiMNCy+JDox6r!#}_0Z{K|-1|Z#h zJ$g*@4}b>1x@dq^nOoEs8UA)1e+Bw$KmtI;$oh};2zb)>f`L`dyxvXX{o6Cp03Q37 z`319oyAdWy$k$Tls$Ipk9$)m8S5*x&RpiMts`Dr$y?InR>JCU_ zu_DBF%e5Iq{>_oSWes=vmg!}CFy9JSKp3fZPI~W32^0d&{{t8#Yi3J~wp@y63F>B! 
zeNDX~A^e~~x1yPw+lY6P!CgT?pZ1-ILk^dyGkr6Dfp)TCVbN z4roJyiK!o0uEWnO&1>kXekn`8X)zTA0S$BO-xD2rhUmj?9)DV;e&CCPB}jcGp+a(R z0pqke4HN3=-2Wxu+_@f_&ooBDO-}sdR3NT0Qs1FLer+_Lu2^Yi<)kYG1jc1p`Iq1Z zIC}?NXI^JGx#~cAYy+}fT{Yrxn*KJ>_TWKoJvJIb9;JPm!gwj?5Q<-ghg> zRPL*|_fN!zWS)MJ4{zesFJ;a^h)-))^MTR`a9#+#-i!IzV3` zmcx4U&3>%|)`v!D$MAT&O{mmm$=HHm-~-Ak3?8MD$LYS7#HHdA22~mtzQeZljlT$?+5s!KfeuT`XWR#v( zzQ>nx>p{J7!|4&9)SBUKlq0+gYr!Cj82Zxs+Qs+~Y(AVf%yZgvmB)I3S_MYC3fk?y z?6L{+Jch+pko&2A+M{9fJ;zQhej(v5US6gX^L^DV&}3XO{F-eo$v=AiaeuRyQrM?{3&ZB+yE;di*1EW*4CV=w@UY zpHnZD5k(~l!_N;Cb2am`wRI86B@5rXicKALA$ka3Fy`HXwtW8=nZl_y20u9!5i}q2 z2T~7Hyz4V+Xt+2qQkdk2_{f^FZtqtlx(oE%+(qo8Uz&USd!Tvl%909hmZ*(@oz{ks zJONFrBOuL(hd;g*;7``$44Ygk*cUn@uQ`0U+0UWzl8hHG059L{tsyG|XH z3M{)0bnats6|0f|Eg1rA#+mDiV7`8D3P^U`b9J?CzX|N951%}ye@Ih1zvx1_zN6&6 zmcqq4T!fA{-@itC^4$qJl>^monCKN*Q^txWXz%&BZz*bB>a36mRU#9e^3kV;2Y#Kfb|m%F z)T^rfh7OMre7Vel^xZ|PJiKQj={g$|NBFsMV#!}-a+>6%0XYMOmRpUD6J0+^`HJ(# zR4U{MmTFADQ!6@Am8!kn1_rU^&{pLaHGF5Eag{X;X*jcKw2bB0ltS}AwmZ@{^}Eb> zlh5Vb!>-;zg(_&x!R7cDxUwn8DbMLhLFH$Ig6AkG3jrAm@-Qj~ERNFEY(&RhhTYdr zxa5mcq=cLT;Za#Ggr z@>*hNc^kA zP$|vRRCTJTJBNf6OkZ&zM2KcU+}6EfM&ldqblDbSU>XteOUnJqs~#>SqRiQ@%6d0OvKS`~ErQrM73iKmi~F;C=`?>kz$dL0vu6-E))UHE}qy z1|f<`vI<9VB3IDOHwv49zdAPDte1WI!K-A2UG9|TEguwQ?!r6^;rD;Mc6Vliqz2m59&kQ&y>wFBIDoEHNsFp_aX!{x6Ul2- z?G#~rHe?Mbt+?rwkh_z{2JJKLimHN}yvcP(UdChV_kB<0br%PvdUy4H0oUH%xgsdZ zLjXP)%fYKfa=V?phaa%z`MJ>BKF%QTC%pl*ky=oQ4|G9Q=(B-;^U{22M|?3(W_K|n z1bh_J-#CmAKcA0dN(?U3QA201HDMy*gr`_c4WdG{BZnbBG)lHR(o=@_QSR(Ve zS>y=g81!g;*Up6?gDtvET(3;}e%wC1!R&OC|!lrjEw8baYr!?-v@sX7G_G@60u%j> zPZc7OeAA%XRNrnn2m@`6=CbK|E$q zRlFycd+58@;BJ>7GD%9WoPaSe=!=GbfjP|tHxQt->r{pre0@5c97Gein3TQ<3>?4WtbhBqdL+a#)XNmhhA6qxO|j{RH}b&L`Lx{bvVkHG^S-(@x2*3H~0k zVLryRf;Ek{RwYGBb^+5#&2~RWej=7D+Mtdhl>l9O&>G0LwZn;HpIY3oX?srA=Y;Ih zPz;lwdXes!i86tZ^PB^$Q1*pE^x3BNS8B9LmyFZr*&<uaFr3h zxb)asJ-f&6ZQOWV`A*0?h8XM|Hr>fa#`AIN{HFInUm->_nIyrJvsWLNg&{~@R+kvY zYX2OV83^X{SmVjurfXI{&pAfP1RxV0I!F2*jNOA8k$4CpMm>A*Cjq=T+SIY0#RCx< 
z!+yTQ@xtELu_Rq81hZ6UUZ7jo%Q?ZX3cg&ke_Fept*-<=jTih}dBTF;He;=-L}gCV zUfEK%pQyZw?R;Cl1BIJy&(E0U61^+bDN*Ble3*kATeP?;j{%+|O7FCpBmuLu(vzs2 zt%~X8p6S)8u&IC{>usP9tLEMl5HAYmzck`t^joLjx@h!vbx8Y2%Wrc;pXbxO?eCr7 zyw!Jm)K{eJ>Ap=7B+-WTgb)u@7=`p(J)o&R-4Te4jC{c>1kFkI$m7pRzw9rn{?)_` zphZc~h>yhpO{Nw>zMRq_uHbB-msz-_m^G21xFHv&OV68h-O7A4mXoXe?DMRC$ib_8 zEwr>VrXC5-l~+GcOBtEF(ddCmSq6iCW>i4DZnQVdkl)qlGkMnX#!}B1*5j*3`vE9I zYIX`~Miww6*@hJ=&Dg0!^~_co8rMrT*q{RV5%8zw=-hFhxw}#3vzCgzq>mOq()9<@ zede+&Y{LlNOpX_|++1MJO;`EGxFJ0kQ3=Mrd5I&1F+h*0rAgp`1=mN(xZE`X3Ch`H z%{+|*k!703JrOUyy8&mu{vplYn@7oJJh(j++Dza2n#_C{pPtZs)!WC1o77(J4bFN; zRd&uhT!}XKpJ@R~ItM^sC#!3dRdwIClsuW7H3s^uw?e1JW#ic6SwDI=o9ZYkUtOmN+}v#84*uqTG73!Ov9F z+2kBj+)2Qmxyv@zQHb{L?(6iD%jmWVt{t~3z%}y;qvhCACg8owG$?#fBPx{~#`(+z zrJKoq&iq6n`!IdNl8G}lfN8*=%fa=XH`g1}xGv-eg_1knm;jf{#kZPp-!F-_P zyhwfC`}l9?(NU^9ctb>G?#=rE!xFRKzEuDyTQJYp=_MAu%4vBI#AcGOtNlc0QZd|y zwb8a$lYK#)oTAH=pf>~bmOjGAD$}rO%X#}zlJAezS5#`@Z7oIG{V7I;T-FZ&sZl=m z*<=oGaxeL13zw8HrRJx>xZ^09fOu6!l3TMbwx9L66DlO%4Akz^vNHl*^;M`PGb11) zy^AGXJ;a;2?%>pMSg*4|&8E?==lgQKdb`)0@8Llg_j^Et5~-2`>o1yGIO;G21dU(GtMjabAFWT81$D%^6_T)H>~g)r#?lUf_3?&NJhp&$^iYrk>(M3Hfh zFkEkF`H-gtYT}o(5X`X~D?P~Gtpy)CSk?gn(jRE->hZOtz2CK_QSaqE$_BL>)l&KG zA$JUlNS4?A=4Z!nyX9`iMWeX4v;0?v;pdm2Yqas2Xi@E_d%`zcd(FylbM$Gqm+8Lj zig;fx=97yZyMDjA#H2~pWki<@n}^<>y*uJ-l|p!Oh_Y4oD58wRuo#*?!=t4Av8yDj zq@9EuwDhsioe+A&fq-5$HM=*6=|l5Av>zw6<<~PNrq5d!C>JscP86<6unbq)l%0t( z_CNl(S=b>%Z9&R-C^8$FE+(9l;Rwk35zcVf&JaZstmb+g8X6iR?o$4=LFAr7s)-_j zoq*t$6EK zt?%NYUqIrjSwiwLl;(Tmttc|DJu(>HXa|K_*5{BCcRUFp!=7cmzq*>QH(H?E zlJD>FbG0OWEi_T6Lq!~`pk2~-Sm4UCb zVoplec)3R<`Xf*T$I3FGm)ti3&h4Wn95&e)6KX2oz=fJ=S&qI%!Il6qvPxAFR7EmT z!*TX)DC{u|=VdkrJEy_*~fubY3L{J7L2+P{W9vo#3 zz#>{sC;$|Tr8c^?N)5<|4&`fW8z*U0JKeUNnC`nYgkk)^L+RicS%DfE8~On3E;VPBb<7=oHyr-9tCxcO+_zJJ>OgvMR*7UK*=ov$$H0rUcE5bc`{+alYocL1l3>r)W7(p8A^=cr44`d;@-d zJAS{#j@1|*ru_M+#tNZ~oqhy>C_8pD{yTjwVI6Dywn;9(33Rcl@iUNJ5ZBF*nO%?d%B}MZu`X{c@SW~qKn`XHpdqzjy;#Xd2$}? 
z3zy?)Qx&alna%5{99WKCdHqlYs~Q1s&agb|@HI5fiU5Zadpu@=G*{=*-Qo-k?bpFH z!%X>2`=)xvOaLS8%{?K(#9Jn>tYaLku2`RxZ~B&D4=OA>$ONb7T^!XhcNzk54ME3IiaNs-IJlv8akK4)UiKm;(dcr5AH zVawD{`)lsrJr#oO*C|a7l>B_e9|%r)!7R1DO{^qM+?%!3W7h{tuIL8~;wz&RWdYIb zB5uS!uqDL+lIkz{6Bsrtb1_0dFU5Ypv%D;hQ=dyY!O(2e2R2gh+|cdl1gPV^v5+RN z%o*`)mE))D(}^Bpe+V}H@7z-24$(KDZyT-qXm4Lz27Jj}0EX$+Hv*6oS=J-*Y4g?e zgX+H9?;JoJd!os&^zf)6?x8hzPEf&ys=AgeQ={ph0kKeu$(bnt+ldi9?E|<#X$IW0 z5yvOCP%i+&dG?x^%7A`-&@FThP=#8b3WV-=_i2+gAG4s-bKY}XAq1-C-Zd<>j6v11 zO6&PnTb50(F?$t$aNeZy5OG@X*r#X5RjWj{_$k1#dE2fmsT1-4_5vLzMT+HS&%#9J)UTD zS{=5;k!t)Kme|M4D%|{=zN6edSNE|TU}QI?7#;OqoN-+NHV@wCx#>{_L)F_9FO;da z>EMX4^0vRZ3}sQixw)1GsyCwBQ3x-S@yDX8(%SaZdfoXJhx;gZY@IsF4mI1~;a#sT z^%J4tdAzs3K6q~FlMCBle%4Qg?-zCXQf&g2dJBMapwSU6*kvfm;b{^_9DpQZNdXb1 z6t`8gYUJBCMpvxpYp{Rjl3TANmf`w+PQAVNsPv&BjheN*gQ7ORLHnbZjm@$YtCp>t zp=j@v%jaNm>?_OIGQX*c^C)U#ujw%g%LSO{)Ar-#FyfzCmrVTx^K5e2pM@cNK(!bf zCVtX*()#!Zx9@n%V(S^Aqaa>g8ca-}K}v$4P?WilVJgHgN5h45A~|LP`ISEdMoVQ6j#UwUPa-; zsBx3^nF78WJo6ESw^bT8m`I*80j1+1e_ut^lHb=)IP|6vn6aAsWkKppZw=j93lWX zbB!d}9eBGx`dO$#!0v}h;g@W7omftFmO+rOK%(ZvNMc}Lz&X~^u=2_49WB*x6ZqrH zw#LOTh5Pq?1~Wh8yRDRLsoEX8#S ziAxKpg2n+Lv+nNO7|7T6cJ*Q@NWl#& z*D)%U7!u&5#%_X6SV^Gn27NKK6$yZKBm{^~0ZFg<8#j0B>wEWQeqr>s z`$MV|tHm)y)pr1vM>q&KDo2UT=e8lTQXZe(`*Puj;9sugDp6k)#~m)>3k)uv`sfUp zD9Q?u85K6*@I(UuUMgzq_@yN+6n2U7uxC-SV*y z!`{95dGG%39==`BhyW?RzqC&NqUbQ0UeIK^Grpw%a=*X-SaLy=Vb@*zT^r{YQj6S8 zfYU>?`_=0HztLn0fMr`1Vtsz9g#NDzxb_sdo@P-t+J94^xWW!BTN0UN^wS3V-vvWn z0h4w9yUl;RO=@90(?7HT{;ahAd$WMq{kMUy{AZ^?F!;|M;s57#fjM0N1(*L{#ZX9n zN#5sw<`}b|8`Gs^&04ZSaCB=wwevC?csn6mE?dOzWKK*)E(FDKN^ z1bV7<5&f=!?sPa7pedKmB(eXJ58f*Y?3K)x;^0l(GbjuOdjH4R8w&!*P(8!HtQct^ z!HT5dC)9n23(`he%~VPkzbLiG_{Sf$3daiuloc4JJ}M}Cag6KoG+50A$xM~0)9)*Z zSYg8b&FlAx-X_0~g#FvofhdvO5_bEh*UB?Ubqn-dyLqTqO?}F8I~8OZRljp`4NwsT z%LKx;(KoOsr}_LYhxGhe&szri1zzu7ZRf`&t?K3vOQ8ED6Ai6seDjge?A)K%HXvFa z)mJ+idAa?B;-rOJVp&%!*i!FlS0um%7X@<4Iicsoe(*|fJy(*IpAM7_)J{*ow3Tm@U3(H-ADpP zk9a};)a&|N;nM4b;)o2Ha*cL#yUQFSC+hMPHJMYuSrww 
ztJ5QZF8gUwxr>|jzN?~lx9sMr>%bJbeCrOu;-+SuVHiQ@kSowAbww{Q)hGSq{l6)K zNEBV^yEL|JFKdPnD+w~Mna{52Fknr?DXrN(>#4jHkDHUZ7%^1O+^tPDR~Oc2>yCL>^`W&;hGcHqN-ceNw~)hrc!)&l#c=Bb)^6W7G8=s)Ce|D zkbS~5c40dzS)#c#Ee}1>lh=!ha*{M85(FR6ZXIRCX@mz6w47@i{jnjgoUyXI8AkuT z7=qwm|6%$@n;hFI&UCObBt_&U$VUU2#U11SHc&`4p22aKep{qKii0jyUs|}F-P$MR znBIfu9aRsj=gf0nE1J6Ekqq=;{tI@I&bjM@Xn+6sS* zWH1xj3$L+j23(xt4_VokzZHR@Mzg!G&ssP@wpoJlIhQADBxhTyxWk{k<~hvYNdXaN zE$q(_nCEGh1+8jp-6IP!blu+`(bgK0rT=Zw0|zYO=HNMd_P6!gV{m6UTp06~-Zo8| zrO?{3)=2=0C6Qy$Xp^Y7%p$iALnN;KX2vC&f-lH`sM=bIH0QA&SJEvuNk0XcpzD)0 z>?w(KrS>xKt_uV_$i2Of8jbn>5y3w(ER`blySIRRqYgb3YPjY{Uhyp;#EEd5=^;8y z%7|U>vJEAy1GghQOkxq2Q*XaSMR=)vWW=`r%TjX@U&{8t&>nd@Yqt*h%=bJ|NUx)Q-daU+ctvMNYH0&~$wRrmic}+gN+hH&oN%xv5z7UXKJ_8_& zWyi?7LX4kiqxNukRCUNMuYscea+|&p78CCpZAbR8G|s^C(D5lXy@1dGn^9B7FoUB=tr`; zcSL2zL;sI&-oaA5wYde37&}yPsv~`fDrRxq7r6)($kKio-hYJkRF1cv}GM4@CLk zoC=esFKLB3rEc*C;3>`#Q6ou`@BNZ!`kW@%GuMLGwTF*unG=HkAj04K*f>BIVvcC& zp87PE$l+?~dgFhuew!V5N#rR#fW`$(Z+l9RG7}(f7um#-Hk&vlZm8PlaQ+eH zBpj~kRG!HVRVg}_ekIENU;fp50X&W2ZQHBle9~5cuYeWjnqR}ndyI>p6~WH5`fK1W z!q&wb<0uCzvT1J?`?5;IO~_v&NLR#h>v)A2<5%c^z&b>CfBX-FV~PO6J9Fcp?f9ao zH?~4jt-)6aI#R^TZnQROtR7VG3F-cX)PqlMw9-%(e%w}GIBsDt@|OY=Qv?3l*cCrj z9@t%%O&DVlLY)EkfB`QE?Au^1W(o6t2Uy>07}wd+aAX8nK)cw=Lq2nf2c3Dm_2lL4VK<<|-HTmSNf; zc+x|3W6f)FhKWp?NfO^n z_8zn6OMZ+@32B}#(k^`Uhgub+;F?aYSgkz9ZzBC71Rdx7KI@4oJQVoOYg@Ut<1#E0{8uJ6~Ovwky7!)!mbvH3b ziFI#t8ePPJ8siT*dxG44raF4u#vllqC>3Uk!zLI!sv?%YX7(<#AaR90PQ$f}{PUrI z*i)P^XTVra3CgQ>0bg&Pr`u<-(wqDRbHM1aN1-TeF9~$IWWLqa$d+Gl#j-OI0#lSY zB^*nY2Awa!@<54O7hWwRBqBcB%1Hsw9K^8Bd;!aAq*I<8ZtT_<7{etUU`v--#d+y6 zo`z8+?{*6gL$$R5;t73OdUC_)OGcplIW4gWA-#2;YH$OKN3qiGR5s}rCqfLjgCpGNAu~&rtYhitVm0eqM7MxJDtn4iSX$X0lKck` z9+VuPhPXQu-wwZYvBPF>zqa{uW)7SiT<%oQmL0qBer#fPM{>GV4ltyqhh=tpB=sxPB@`Pg4_J%XmM$z68cz=2rdHxqr5Q5Rpi6JCJJ z-?{X*qp&}bgQY-&Ru=cXZm#aJjq-9rB=<)q-Q~r{>{S=NwO?2ZDNjbB#-nSJ%ij72 zugbws;==NFI;BBr=mH>6HM`eJD_Fv_- zG~9sxwx|V>$mfteEoC$hD`m&WHwKysS#3g6)E#FV#0Cm{YGDguz2uF6hWaG3IZfKA0b 
zPOwE4j7Rkb_c=F55K3Q*YNn>$7s0K=qx=q%8vTM9JlO8fiZj5F9hIY$-2HG}r?6CZ9X(mb{* zHV!NRg5v-@Ahj6$g}{n-?6+@)fd}`>v`dDbW>b@%qPN9(wZ}ez2wQ5Yt4v}yk9|02 zP7+d#MsjC>W6cO5K0BuHC0t=>{6VI_eS4#lvU>Ou*_S8yJhzMeMkTG!7adDqv~uP8 zJdvb1+{;xfFFr(6PSwk--(>j=_%*2{~SF48Ft)O-ztLV>z9v@lR*=X_QQ z_5ys>MwaOft`uuwTucBuCR)_nY-t9(^kyZDQ|YB~rO)vXtV9VoYHW0#3gSiiRX<1L z6TkI&LqOT$xGc{jZ~GVpf-HYnq^^q}n7rqpi7uVfp|s zGW?q1R6ZoWMJeiR$$<6fP?;X;;J9pA(e1vt*jrNkQUi$quC>nIQ_^l1J+==ejR#X9i!g3MAQ%w^ z#eaHH8##r5qr<_Mxntrs^X3d-a*!Vc9ea27SX{r3g-SudwJB`e6uKNF)b_oV4Cdp( zN_b`>XZrV%tqQV*F00SXKy!N(g;nlF4p_;@Ywy)@aOF9|U)<>{L0Tq%v(ojmZE!6< z>g|uD8XYu^9#`auyl$ssac}=9w&xROAI5_DJ$9E+hYlZl8n%`Knb+M0oSD>|1~f@d z^QaO1ec_KdL=fq#UZ&l8Gy)#Uy0?WEQjbRR$3{luz(e6$WV}8R98*p@wGAcW*;T{n z()xi~@_?wY2Rtz=UmN*HL3g`i3qO4&1Nidc&B&~KoP}va1yhs4RaX5jSBKMIdu~p% zcWD=pVd_z5oO$)~*QJ@Nh_}?)<3zoa``{KmYc8^J4yCqnhp$c>@P#Ge5gA^ea6=qz2cz-whlTy!-#y`_gzQ+xPD*NlCd= ziLz8831!W`Z`q2nZ`m39GO~|IqeQblV4GO~;LkwdX!*i+of8W3R z{{NoW&zt9s&!};o=XIXPd7Q`k{q}|Abu`u%-H_@ZgS4@9mo|4`rKAssE49+#hEm`L%(~pWZL?FKm+szw)^Jyl&A=*q|fN|l6o4z$$b4bjZI%WqN2?(rXnJu zIF_i(=YrHT%r)nDJu*)LSK^M$P8gsQL64$uYQU)~o`-80Q&n8U4I&w996g>YuFAlT za9cbIU=#fPuDwfjLbMf+fTUNlce}qm+h!nOoDKn)|0WHt^&s1{oSCHXUh`|7n=z|T zFm4|zvN|BV3uuO?9JNBmDBA9esgK18zG_jn{KgGQwLR-^6oeU^goN4mQVlfAO5C^} zBpc{G!>e7pGj+Ohs&etu{9T_o=l=5RJPPou6^V&&DY8M)oy|NC#NVsy6ipcDbzUbP zymjp~GXM#!FKS?V>!g7ttj|4;0mnZ=TW;Bf?Z$ zZ1PE@UJ$k2@l$FXt=%cf*J9)zXdRe5)^tD~7$ujC@tGY;TVwRQ&h6%zj(i#V@Wfd} z_~u4K<(F_NCJj3vy7qd1Q+OO4q;$w<;Mn1gqI;<#g=!BO9htWMv`kwR^FpYP2!jT~ zvl;;l>JfFr#>Vq(?^2eus6w}anxE?2W-riB&p##Nbt?SJoYQu{O|ELfl?D7rA&tCg zlGc9q9HI`*8RVyToz0~mzl1_uQ=zw4Bd6`kALREv{o|(jO&yq+q=~};)4`NAcoJY6 zz82m)(9@e|pv%X#ODxccTE>kUIV{g!0ivrcTpHNbcz*A6aY?<+$QKIk7YB|#{_v>W zj*8Fpjt*c~8m5r+QRkO}$#uy%e=YXG?`c^9+i6)KcV+QvnUkS*7YF3AU$us6miDu_ zX@5La2#Sl^b59prOU1|$^1XLH(RB*H+%fY>Ov|!^trOQ8wLK~ba0!!#h5F*EhHh^O zQkI)MmD#7tx^50kyrtdoS`CzZdT0M#4;vT1<$G`VJ$(a%ymiS}Qjjzb>A`lBa33iU<&mfB3gGbWmv1}fYuJqM^pFXj`q;Wp95cPvs 
z1MYSeDAQQWm_ql}wqR_q*F@Tyd(Sf9J757Ll3mf@DeaEGDJfLWOIC-VXqx8Zd^`wrc>*h9`B~9(CcK=<*#Brb4w8Bnd zYCFRiVa9CxO>R_uSICZYMDd3NX9dGD8I&(!;tDC0bvzEsND=HwUQ6)9 zi(rpOJ_dCsck#1`=Cha-2Lr_u7q~Rq$$;JLU9G^a2-oP|d!J>}tTIN7umJp-GE|$z zgH}8@m^vsR+3FEdW>2j>~p?@EP3h02;;Jsv;5dp@KC0`E&P z+JCnjC@_WI&`LQj*?pDPTMdvzh^U`>`yDNUro-`WVM}5j){EK^d<-d9N|vfWv_qw9 zqaH^MRV>g|Nw5NveBtIpna8DgWBoVh`}W?&vT-#YKyM)LXkD}G+B^8(@mQ z8ESsxL2h8RxgTQ6b)Ug?#>ZGLt473DP&kIejxV5!CDQ*{?-lV-k%Yx_H_3qqV7!C0 zL|22=au^P<%Tv#E#49QU1z`dWSS1fLuEnE^Rno@oY^J<*bH8|}7ZyFz42-oOjUzI> zdh|@sEYBwK7qc!9LDxR{0_djO-T|EVp6A&Y>6#2bNq*hrp4n4MI{Wmh50|X0?AVj& ztFZ*?HQn(i*lRthsWUK;Ht%9BmRrOp66b+-=)0Q`x9&m`!QK+_Nj6y!mCoIdfv(1< znZub%s-4xRuDf%O~eP<)RfV8g9tNC9N$~Q7!^@>{Wzg zzWTEET^4!G5bJXkCeoj|eR7@5- z75caJsxSch>t_kIV@V(qpFma8YVSKd}kiy#XpV^zwCP>TgXQl;I2 z7)gb48t7NxUgZ`Lr_(RLlqh6g=Xu&Cs{fY%${23s)0FR;r9|H^mOllJ&`kh93Srr| zJv$`sWTrv?HLR8Fkh(BUnWnlEbU{m+3+|I(lCT*>JGAl))-tiMg04OVk1xycqfyKr$0AZ1DpOwq35Ya~bYEc~c>TGT4wwd5bJUDm_gXj8< zgHmsq1B5{&C(^Zp;+!x4dd~=v?d~jydW3evCcU&cEsEFEf!c;OwCe4#@$zTmgTV$V zj_-eE2Yv}gy(XYiuE-F09QrQwKZ!6XK*n;MCB*~m$FJ9Ze!b`94u|Dz59NdW(-5=9 zLXypkT0+hD47R*}X*`A1KRUaz%q0T|Q^XhbC{J2&>g&A@K0Nh^e6}*GMVYlZ(zJCOCMJ@Oo8!H!kz;~ zx**X9aI3D2%`ASyo7ug!oy|w`RBxK|B-cSQ@Nm!)F*icMsV3*@#YP65dJC)W2XtU) zscn(zM5M7)ks<7{AhIGEi(kSIj@%YwN-qCs-~K)Y9KZ;3ZlOa511j)sk|4x>?3bp| zaG(l||JH>f46omJp4yqZ=d81m{%-r@w)SuJ$Hj5nH+2DmS$op~AB3CV%xUhFqpZhX z=-nV(1-rh=m-QVYk5n$EVo=&avCgs+ZQA|*uj5R*H+g!(J;dFQFtj=s00Z^M65~%L zgxpZ`WTvIE(ARbeY4IAE8-`V=r%>07C)w9PlzoIaM zRQk~hQL3FO)dovg=?B0=0qmM_->oroH5%K7H~G3lWj?WYzY2@@{pPJ=YJNuILLD;+ z42)ai&!wZSP&m5D#5*>xp|Y$TobvMW-dvG$J>DFB1tJ^FZEJNVox!D5ix_jJTKn$8 zSxy1RGpQ(pXac7|w&lPuE0?oJ1z)7EDsl|&U%(MR>bEn(UDG`reH4*ry8kR&<=qSK z*}A@ma4W#MK1%(a)3gSz_wmswDw-2ohGldmza}@yah2ti!E7$)SyO&fmldrbh>1^R z+OssVl(9jyj8yH|6Or1n!PZpVEo`qC_$%!|hGbW-xqV<3z<{h@Igx~-u4!M@X`iyA zdVLQNBp{8iLd}Ogk>9uai_rM@$oH**jEu1B&Sb@Gfh|9;BbKgeJGIniJVz1Jo6#pp z|5Y1U2@7(!+;}sZe2yePQ_Y*k!}!8k%9Yw4*0iCMY+(tU@Q#?|S|VcA!f=RzE-Ty~ zQ*+&$(o?>sWBAed&D#cE#E9DOL;g%bmmVm5G0#rf 
z+y*2qn8q$plkm{tMq@`RO8ItfLE0H)Fa5JLBVsm&2Dz%}Q0C(760iTw;D`!B3J|$j z?BTD&-6f=;S1}K5pSpS!Sp*J`l66OPrtmiy%I96NS*;>?W$Y?I)m*!3!8@m?r+36C zVB!N6ck|{TMlZbaSN$=aN4nP7zBxT~lB|5AF(UQ#TN70Rq$nZ{mC*a5Cz>QG4c>oc z{!}d_VEN$UMAT^{YEPqb; z@5JEWpS;rn3u21&wf;ZIi2q)0d0@Rlxb6NL=06?_%oSKm!fPMIfBfTLUkm{jXkVlA zZ?X9MQ>WXJ4?oGg81i@K@9+8S({Ai7#QsM)!_gsrrf}}m%4Dq`Ts!kBfK;4xQ9v+O z|ArR#cMCPB7SMo|GIO|NaIjZ zPh6jmlU6bOB92H>L!@RlQ~+)1lVp^&)>p>!i8%hEzquk^!eeCxz|$NWx7;KO0YOhH zO)V{-jWMQxW!&OEwcWCQ*(&a04BepIu5Y}uU%=$3K7bGEn9WFg&IAyEYx1%azclVI z)pzt`{#uYS^(23U9MvAu)V*9VXCczeuwlz(iyf*_M|B{;J; zv$mC03K!HfS>v&dSH+rP6Ag6Z?{XserSK2#-+#0A-5ha)Xxe5P1Y*0Lu!pLtN}QhnQ`qW6}OsH_E>xa+KcG~GbR z!LE@>`G(MV!RO@e`jz$?3p z*^h_v*6Z~f!8(gFH{Q((6K zjYUuQ+eCd?#35h326ewc7!z?-2M7r?+mxs6#1x@X;N z`VxpW9j)%=Y^m*kqQG*;DilPsXN-o(I5b_6c{(tTjshrn?vagZ5av~m$(di$0<|MV zrY9*^2d$Gb-B*LdaBd^&rOTe-nfAw%rUUtNd0`2-@+a9d4Ue^s8mo%m!A!R?{_?Yr zx2w^b_eV(vaPeS`yqT=RrTK1Qrso@mjsP@3J&W zq+rThgu4=-xh+X{@j_?T_ApSg?bHq&;9;bn*j5A;h@7mRS?U`rBiRN&xG`!(#Od> zLBS8Lv4>JK=LR^eXF?>#2c3dU_|wGQURHAUwR<2A`G7#W=NrXC6QQHsied5ROZD*(fIE-2Pc^Xf6gFZi3b&xAZWKtqHRs;!Uo%Dv8 z{_fXB)QuS(FXc|Blc*biK8KI~phoq^YDzml0|0|Rn%BSgW6V@9b{!XX=&M==-%S&? zhgFtbrlFHr>{T4y+4tdw9_(*?{}59mTp6-62=md72+s;oljSq7i$&LO;WGsRxN@Hj z-!rvC4wfSQzI)CEohNy(ET(-Er#^X9_wj_2o$=3*!!Pp%2t-9Fn6`-Go^B=e1>4xR z2mhe$;?dpfRveIQ7F{~;JalrIkVo`zj_nqyuHUo)87<18s{GgFn7J#(n-Bau2${_Q zQpC^NnV69hBCNV`U%$*pl-_7@6Jq|=Em(Y0;)d+dFwiR_u%Czk$>D? 
zvDVZnHGTAtoGTZRR@TTFzomhzM_E80UIOw#PcOFxa!9uzJLKcctz-a_(cy#5<)Ra* zBx-)gK#%%-fSYO0K4{dU5iEdc*rM%koqn&%`dJvcHnx5rQXYS%V_0)z(!b6SW-(yO z5xnHJyC3PHV)UsB1@Kjs?6$8^*uP(3v(6(7 zWblH3ww>`p-ZZIrk$|vSu<8TzGIQg-I|TY#n%_~0Sm20V1zAaxNErvPMI4l{aEk&B>H?K#-5+am<>c@3;cyR*YafHVbw=RB&FXa+UAPgI$#)^95=f(Q*; zmK|Y)jKF2hzVF#kliR>Lpqqh99AKAH)lEa`N{^3^GRl?`oJTi<>&c~)c4L=?dJ5*$ z1CIB=`%@QAH@ZH>vi9u)rj927NbtC5^j`Ct=-PZj^zro-;kYIaIq!S%KN3~BN{>Pm zTXXNc{-wlg6sDU37Vf76bol-tFOTR|MXK%x>Jw~byF_6+{C&x7L9GgQ-LFhpCyBiRbxx4f0dAW-G78fd=iH6>ma~FpibWZpX z0{Q}ga2v6~irobUgRSG69b^nZvjGz z(8P_@#%lvnOWtz{ym~1HdXkz)mv;4i{hW&VUt2XV7}~pU-esXQ<<@kn${nNsl93ZP zBGdmQ)$+ubsX~*=hNUmZn0X=2neUV(Sp@e@sv^c$#(gp*5u4UM7g^rW;lJoeWt5NH z5Xi6|ab%WD*(E~b_qOm|0GREYgCL~tVg@8R1JGum?fHb5<7m;j@9G>9ZcQrD@-1&9 z*Ccj-^mweGnkIa`dKm0i#`H1+vx7gDExF|IW_UKGwYk+82~|vb;BSXBuz;6OI_3gx zw3grYWKeo7cS7p`Y+8*7evw+w=SJZxT(M!)-=P&gpW^BlhSr+pCxR=@FO3qg{n>h1 zc=97-`^v@v<+JkNZ(9}H;yoI-)QV181@D#w1JIpba(oLw;>iKrjG{~N(rkQSpq>nx*U&K!gNQg(dGh-Y{Pye z8n@uh^{2bvBP9Et$g1}s$2Rl(ts6k-_y1b5VvZ2OdB9h3+de-@JY` zHi_D2k;2Ux{4?&cJy5}kbHr*5Za*nT0mjFxkTas}(0#8V!n9y{3-{Lb*jVZb(0nL| zY-WxBYH(%-*rokJse-)G5mXpc0rdr)SnQM!n+X-C+k4V`m+6)L52i)(jU15q5dLR* zq1-xbO&h1JowG>B_=$y!nA6hE7+FsIRR99cf1+5nLnbK3*?vzQ+0yT@FRi-`WXrgK zny)e1j;sL-kkUbn9EYP@&_P-JZtd2gl%HqCn~ORUhpRz~?#ol(POb_ZZ21og^$)Kv z^Dj6MWv>n;3R%I-N8w3MpZax)EVo6WZ;{Hey>#Ejo*bRm!Cb&i3McA_M8hsTG}z>{ z0RjL%ee34BJ3x!K*Qy6VH>k@he$GH)QT5Xy)jNMrj30X@mr32)f}&9Kt+S*h_k#*> z9LsYsL#+{C#n~69;*Us>UN(vMg2b``G?iDuUw{f zWbiZ3C!Sm#%JbWX7~(_B+M^DK@iWDyMr8hpZgziP62J%qI0_qkpfI~U*7(9hHIlZ} ze5GarF&~6B%kzAvlogTpR+6*_66(S$&m@*1cBk>F2XTAQ%u?aVW3-316=+R4>9bSN z+7TK;+e1!i^$;>fntt($HBBfj&t{i;_T!|GWm*G;&IRpVPf-zYR4717o5dRNI- zyNty0YNIC<_!bTcm=jO0d>e0FN9zDU+?vT#0^8C2bu6vn4lWwcuZA++cY1eR>6wDY z4p-F!=D?HhxZj0OpS~34=K9I2jj{gzYRwya&Ey3VvT@M_nYlWDO-l|#Sq#XVoXo^z zQV6R_+wez<2FV)a>6v{4-VCRS?(A9nqKDUaSaK*OJsV@p1EAe9f6RCG)KSr4m+6Gw zG7}Uc4ad2{U)F@BRABq*Mb@XuoGIi`EF&|$)D>KEx<{-WBrr`P@Ghr$jiy>V5EBAk z6Xuo;ksEJpiD0}Od=KpZU~ODWG=^Cao^60`oOC<5 
zuf$~t)J3Y|(zJyUH?J%$-QHvP+Ur4cJ#&enltj#Hqs4#VXJ8ThgK*PZ;ZV*p9UN4L zM|&i$A@8(}k4p}{Q*JoT;alZ2xuKy6T6glaZF{LR{ecg#JB2+JtTZ(>1uFrfFZoWa zfLYTnVWRp%2~z2e6;*TcmeK9>%_z!O=JrmUW5(Qe+^1IaBC^)u?%L%IJOh-KBxTVc znybypK%TVBTQDsuv5g=d*bpLDQgcsq_V?AdW=l-I6+JJg0jE?s^A-nYJI!30Sd5(R zSa`gSZhhh~$_Aj=Ryz^P^YZA1ngU>jyCXA`FA)YJ?{=%Y^5AL?)^8QV=mJr32RNZX zgBBQ1BbcyJu}MHp76o#`4z0Th$%;LN9UpWISyCTHU4)_OOD1M=)zMta7$DV(_4~Zf zh94!Rdth99AYw^y6k6os42uicIW z=bgjLRy^&B5$j@~*#HSizs- zDi5IuZ`YNUq04nl&xG8qWs@;9qS_Y2K&-sv&0MVRPfTf%^gt+i@oq-=LH^8PkEjCCB0oukg+!n4?{9eblt4+|ODx32EQq;a z!_J!=q1=X_yNetw>=VY)9*l%AO=)xJ>_KC9u-AZ)h<~~+5|OdtH+Ig6Q)hw$L%2R0 zr_Q-ea}m)nEA!m$$-<8yI9*h0CLoGyF4no# ze~)q=IvBwja;qQlGk)8==*yarhM{?ratIXVcCdF=-q$0IYy5Naq(GY{Z#20LUtVEn8zE0PyYz&(wDb^6TgZx%UyMu)fBjB-EYz9jx3Z0Q6~!rTe5>w!g&Y)yp2G8 zn_%e)gOZCjm&Y+ykliFa_eF?kw`VAx1)3G>kbqqXc z71Fu-T%GoehC*4tM+P=oJsoBm7cae+f5w6xcIwor1%L*3-LY|(k`@tQgD%A=n(Z$} zmZ_YMb#6yY6D0!LPPY+w*pV9QS*HP5v0xY9Z7a?@R^13+Bk5vPguZ+2F)eLEmM4`k zlK6&|@DsBw=UpknzDT{+L<|`_mR8|3^Wxq1^HJ~tirqgPA|JH6%c81Y`!=!4P%H0P zT4}BowybG6(16A}4)MCOFN5h(7`eP7Ya_*3?lKoD4bgZ|Ybl^1Ps2?AwXT&Dc~omxj#XxEp2f+ zWs{~0nE%F7C}lo3hG)0!BB&$56r>FLkU>ch>Fs>lIG^c1vyFJxn^}Og8r>mG%{(Pq zthGg9I==dZxTxu|3Vy?)cT;pY^(!5qYXU`!? zMes62)~f6Tbb@69K%!S!qwx0LRGE132qn_LZ_pksIhrn)ad@VB%AIv3c1h}HYmOq# zJW&X~voIPg1&c3H^e@4URS(dLmNo3rL7Dep!ogNE?xG#*CN|(NqhDoq^VZ?xwpt!` zLfesBJ!Shwt2-#<^g2iYi-)qL)^IHzdcWTk6+;-NvRO$jr#6-%Z8_JVvx7ODFG}n7 z&y`{G-M7ab4{?}&wpT4nKlm5s&dvzyw7~+aIXs%av#l{SyQ|O4!S%BF06w&Mbxz2T z`$VH&N0fkB?nr+tE%n`-IoVgmGQBrWPIF03h=oooLxX-kJapQU-BCgbD5;~an5L`C z?|04}2vhb8+28%y2=(g7=YHt>(gj_*5M!7ElIsMC6l>u6{il9Fy++S8RSOBHhj@=V zdsF<30`&au&3o}?xU&Sr2p!^04z*fHM(}ELS5rrq+Oa;aMn8h9nq~dirck+|X|;>} zUf5nZlE)cou<+c<0xNG-!QHkAy12wy2oSJ_1WdK0`c5Pp5Mw{qDlGXeg1Of$d+-vj ziI<^=9l|Gy&|h!`nXXcEh!4G;G7Y_!{1XweAy=yvZA)YUKxLzy$sl}*Kc#6g!>6V& zs$vRSx-e#SN;Vo`@aS5Fh<&TMaQ>-cJ=zl5GXr<`c*|TBIJ5u8y+_LX>w|hijKp5! 
z%A^t5rtrHMD=KX*)#07EhB#>S3bff$HfRTVtGX<&2HVrY&tn}6gDs2246|jK_GxTC zSh~A&{0?)Ct5mKs_w0ge=yBI{}J6T-f zHK!J0Z5ftH1_>{_y@Eaz$f45Rmhxf1w{R})FJ@CB3f>?n7@%@lmtG1spVxBJ9t@W+h=4b%5+Swh6U`AM1Lv8>>KOC?xgd`i zzRqXnH~8UUSD!JT{h;E`X}w3)VX;Pw;%m?`)mj`r$(trrV$ftV;ZorV-G^WbrXo(u!>o$r-u@Y^BWlpVHDKzjy_DM`h(mwt;=tK)Qt)!z zusHgi&WS()kbx3m-Vzj3r)(^tp)DGwm-sXt1)^x9fS7V6wHS;1{*pocW;{bPTbz6< z&jTrq7XlqS09^z7smVptXiEOT+|%#bLP;j7h}&asSpn_N!cH@gckFM3D0kbe#6M4g zR3~dsl*qeJUD|cAD!EH}@0LK8u?H2Szc-)3Q6kbyYfUH}N8$i_ zcKLMW!f2F}oX!QlS~EJ#XzH9gJ}kREN_v`8L&b4&d!BRg-z2Pc~-t;RoI|?^?lpY40~SDMyg<8G0YJz0^DqwqP

    ?D-w+fzzehC>2GV6)P1lUeL-zF%2gV_V*m-?*Nc=@1_8h4%ER`2Y{EHcI@$NVx?D5$9iu=6cj6B=YBGm_a2OrOu8V#T2agrNGb`dECs! z4CefM3vVA$j&E+sFJ(wa%(rm9y>hqL0W(=6072t! z85FHq0(l|iTX%2HhB^3KwPn@-w5B&a{Bx)qq`zt7jh4=#a~W;P`?Vfs-Yj;}7+(5E zEL1_wpa4vN;3}wrJbrr8r{0gAlrsnv@Wsj^H&7c9WhND#HqZ7aM zeDY@4{V=*vmlXgSPFyFYPc~<0L|BowF{{)XQ2pu6UMQY26Wg;QHo0&WAI=rw>MBuOb$)eo-@Um< z2E;i=k1X*;lxcLOL;reN_4*u**v+k;%4S^*zTfO_FJfTs(!Cr6=T4f$+KKV1fCR+o zAPt#}l8Q^nTwLRKQk2xI;!CCL8vY(rJH_4K{8Ok)LgySN4+6o>1i|OXmqYxkeF+m? zW>;_=Z#LO-%8rjJN#yK`KF$MfeKuH*oztn|UZ1kaq1i(0mP>pZ&j*-`H2SSX7<5Em zVGRPP?XzDX@*lE|Yv{`9#=ex8J$NwRzXlwOMop+GNDV}E(P`jJ_xACj_z{{mII3Uz zI!U#k=GkaZ|4b8pnqA8H+-@ju;+w@01+LBJR!T3n1>TsA>~1Ps3BsI_oq`@NG7hp$ zi;vvd{enU5wdnq?hsu$nPYfw4Ig=l?E*vOkO@h4oU{tn}@>(OJ?D3C;jI*5Y6f0;; zJ}-FGx)MbiwQJWxJcr<(K#YipmRup^b-vWrcQmK_8mX$3X8UhmR%0-y}-wvoZ9dyFmTYwSSr$7>=@~%Py7IJ#9pk23b{0Ag>a{ ztLb3i9V^SB?aqPC8rIh+_clRPbZ2G7=pICHd{^cg6M-7QM?BS#h3R*Vvqbd-S(SG* z0*UY4UnynC2Xs(q1>P;j{-yZN6Hzq(H0pnYc%->DF3Vuf&_^OUwMO2k#%8#~b z4iDZeZC(EEo5#k+?NfVM&}pF61PyFioDhr9?!%P|03G6HM>NNG7S;5(&eIc`p<12d zb3><1^QTJ}N9PA;!!y?oOmQBGuU&v@!51j#;qSzHB+w3>8GM1bA-m+cX3+XlkM7LU z>&GvCxnLyUIkwfb;J_^-;q0T;C}Td*Dv0u1Nm(MmiC+FK2iKP1<1_syP2JHb8Mf1( z@C{;J%foyE9iOjcH(J4S)w!zZ4AbPgNFzL_Wzkalw$X(rNw_?reY{Fc_b4sO*)85N zA;^u9Q+y1+_EoSl2B?#)PMA)45=06ApWQrz^j?0!JfG%hwP}QcXSusoST;BG#K;R0 zlR<KiLq|QL11N07x~O&d3Eqs{6LXfef(2vCRheko&#<%#rOY&6$yXRe;g z?aYw!fw-5O429cTO(M78Wd0U+h4lC_!b=9WF?$$V96A(72Oxp}h`Dobx2G`PLyh0X z+GPizp@gp6gRNyk+=5_+u&6Dp_ z{?Ef6=`$9&09pDF0xHi;Pad3XbrBhOCVsa(^AabBZp;|8dQ!)WQ}lX1i1iu5COly+*&^t-x6M}EL?k5ie1~-#b?4fnC=k_#h-=Ms zPpca;eDZc(r4qgI4RPJpwMB~RX5h?4OP0MRi$?Jfq9kEq*r-y}tctsG}eb=Ggf0|r}0epM@4$)sVccK9FPvrZ` zT*M0rr&!wb%>!H}Tuw}5?sW(8Q+}A}D)n@15~=`-PjMuz)F1e|xQLXU zG||+|X`3^}Mn>zbdM!l=r8nK8kO4bN$@Z5WMV(1Zwk&f$*4|8~6w}>2?VCntSIv1x z;u4(mV>N3Y&o*orWJVTF7l#0pRfk(^=#K2s*Ry!YA%vXde+Ar@tIkNIc_QV}-fI$K zkZcH#X?eYvA+Xl6U5;Fhq61}*&A9;-=U04uojVSto`!-?yRW?)DMG(Ga2~QEZRw$a zU}R9tp3so&d7Ky*6F0xCy1Pz=vVeb{@R(8{(kK&s*FPJk4x%i~&yTObXWEq3JhoS5 
zL#*r%lz2XdD-b63Xfyy5_u>RZz;ouTYt{r@zS0xF?B_F@#yr0zF)d#Gm|9HGKTT+w z#4+esb+SVIIx3Isg*pn$fHELt!uGUlb6=HVqz*Wn=EP=FjhksuC2^w$#`#QCf&E_c zXCN?5C2VnTN_5Mz+$JB2a5-k(yB<1`4NjN2=E;4$0me0c%m)Kv8iU)QJc^sM{IW*5 z7$}O@{@iFc$jV@D5&F2&!-wS(lMqf6tJ_4?AC&Lv1~#do(oM(rm2={c`)-aL$98mt z=}c6&$A0d(vf?tYek}A7)6FQ?udSaSZ7Y9RI}TX8X@tY4)W};j*O`(^SyIg!z}CJ! zt=^1WFL=oO*?s;r7bx1<7x%t9m*NbR$TuqsRU@&&Pk$;LRqr^Lp8GJEOa7z@+8T3P z-V;~ds~!?uEDt=HSQxr6tUobRU7Sf4*V0Ue!}&A{?of_9GmX%7DItvg#d4KgkW4x{ z>EajMOc&s-Ck|S-j^ZD;*6yt|m|7C-W}}$1;0X6YpZX3$vY=Ncp!zc{%@n^X!(}G}kndj0F$zEm zVy6X;-j>2T+E9mHzxe>5(iQS7s?OMw{T3pJx`vB+Xv*NDaM+@_rrdD*w;Wh>@G$CT zJF8_KkQ&F~qmItRfM#ezYK}#PbKITXp0ehZO-+Z9r^POz>~K~{O+d5U7{CeX`w5@G zNk#jyOo5T81#;5P-{WAsQFflH%xC>Qp`zz)^zcCNclr`meLiXXT>3`_R0Na;*=A57 zRNSwWh35`?pi^KBPHF<6puZ5UqSK?y=fC82WyQyB56)hME;E`5_TqOt75HiLDm0kh zmH%cetwGy{0_|YI@leh%+da2BYRud7e1e`HBXqD+jdjUj-b(yS~Db=l@ld z^z2#mol^P4I?b@o3BQ**7;XOW;osN(-nQLOs!Jf_6*^yh%YNJVUzEBgVID5dqkm*Bj=lgeJE|C%*w1U#UT%=7 zl9_P(3>Nv~^Sxzoq`F$U?Iq*c!k6Hsnt*79+ec~7{K~HKzpAZ&&oPmImaa?m(gi+e z1-EeLinQ!(&J26E93e-785)y}%nL0VNdq< z)7L?rD2cXS9lbUA#3j81Ol=uA>NY5YylkwT0*ZAV-$)8EBtQ*nnFVn~)NlIlF9D6%58XAT07=xD?$5Icj%-KcF~RlB+3lf!v5 za`pHn8OEj?Fse*&YUo{GPu_Ijue!)08lR^+!a60@m`4A}Z_{8x4H!0SocTg(I@>|R zWuW!4xiU>S+djWAQb^2M4x?7*kYh;)#3$9M#Woo3U&07S>)wm_ZxG%O47r$A1*Nn9 zv%%o+R|AAov7%0L+kIWktj{U23;@@Zg zxT%)|)avL9y({HLgSf&BU_G7l*!V~MU}eptb52S=@kRKbR}gT!Rw|JC*f*A83l-Kx zQ|9xN2AOBWE3SL09CsNl=2OqFPWCbLk582Hi*Yi!WxGh*bxS$P?O%saCA@WbDr%$b z?eXDDk#8y$uiB_8fKoHjyEj?gT#~=(U(>j#<(OdmyJrb#x%-_w{z+TaHOyebDhwf< zQejfaiKkVo0XW#wP3^^QIg=kZn@Fvexpq8=P;J&*IIr;g*?)EUJeqARF`JLN!0;L9vwsiufQr3K zm|sHNrP@>Nj}L;>f&FfRe5HclqKtK)+x_8e{P)y#+(k<6U;y*@AL>nEm%3i{_=2M~9}szd z(~5mwl2n2DICWF3>!W`9pH0z!P2thURv?yi%a81O1wamE`1++;J@)o~y}jVSo+);I ztTBblL`=r*yd)j1&G+f+8W(9;u0{Oypucr4*Mqx&p1(k}b^Etu0m}mz@xY?d@Wv_s z>(ze!`XLp#KHlt>{|#vP_q!>aK2r3d37uE}W2nD|f20lQ>u>wU|70utJq8yYaD9Ah z)tqPgmni)D^%ek*S!@$h9QF55e|=)20gk!NEVnoPKUVn9|G&Tt$je>%(cc-Izohur 
zps%QoEIiM1lK(0@{~fq`1K3GLshptydb>OW*5J~;*H@_jxdp!lRrm(H%H!Uc>i^cD zW}^VC!RZW@Gyf9cUo*?G0bb=6wqxSIy~$m!2hw!WSASe}6&k<63CZS5fEx3H#20#! zGn5J~8k6&QBw|A{^%q`*xY_3faI0S2*&enRw#Cs2wZ*z4?SH%yj1#G({pYbc78=HM zlkaxyRwed+-^PXbW>Mvhi$X~+h97AX#&z@?z;RW%f%g+gxD(~771unMsPA6p4kAw` zZPcP2x}twrC`w7&rJTRG68kt!a)UIV?>+RY&l^8Z!~L1IU&H=5wsh+DOmEl5MN#cy z192Lke{bzQ+R&+_BD-#azh>8-!nf0nuYZiB#pgpgbv7KfCb-myHv~5UAg5;dB1h*t zi+Ey2Z=w>*Z6f!79Ox}%PV#ElDD}qCn*97SOdnDyuboO4!_J~R>Q6knZ zV!L}0Q_NLSs|YE*A@!Kpz^Kcbs~brbA$>9?yEwb?YWv6(4`fGzojoEqZ*FYv`RLq9 z>Iefa_%)>>36F$`Luvlb#e*L8o+ZnY=UB$IX{C_6AB*3qz) zb*8(A`QJu4!%q@>=Lj~H=K2ryPicC!uR=xMM~1f&ZXA%N1PE0RFy0-*>YAgd^>fK(wOJ%IE;=tV5F5JJaNs=Ip^{@(ljKF`g!Jb828nK^Uj%=yin8QgPpJbb?xLavOC3CV(UFv9v&f9_2q2CKl3zT8s{sh+Geu2Mq-#yLSq3eXXL zw0yhRqZekrOya`IdKJ!#iN(E#iN=ug!{ooJy)$i!_ZeP$5vdaLf+=Q!T(6^&bT?yU zyXOwbbF#GFVkqY>|6FqC7E!q(e-M=f>zYdB=A~viEeH$h13 z?9qkL3kOV7sW^v(T0)mV-VHVjCCZpdou<=ZKCcgIgm@LQDtYUoG(oDdW<{mVwyH|+ zbRibIvql5QHt4i1(|pl=a_SP0THPD=A9$Didbf%f{yZBqP@6*=PQ8qyG@xdWHnur1 z_nwiF5f5AtH~+fIoL&Z93*h%(>CtA1Pam`uLh#;{%4WHBvhx`K^QUhgE*4++GIL{^ z&82lT@vT>eSMD6)1e!aOrS@#|F|0dl)Z@Ob)f+)4q|{&_3G)hYk-yt($25ssTW|X` zm|3H%{bgUAw&3iPppA8$g3{UhBq4WZYx_O-G*jCKGEOMrQGL%eUw?_-JHh^ zK{q1RQfC&WF_{JeFsGP8Jo9fUp4Tg<7_$lq5>axpO>eWqM$t>Yi^cdB_9c-V7QMtw za&~KA?+$g$Rx&mT1J1@AjgW7RLHPF!$sk!0rOQ0)Nt_1rLie`H6Ou8IX@`};_I&=` zaR(H&P{FhFNvArlsX%v?MNjgKi&opS4>4_N}FcL*oBb4w8l06!L!9UA$H5IB?onhR*NuD%hYiySn7|A-ka!R*(n;#!8 zPmk+A4obl90I>b8WX&5Q@tXJXBesXT&70{j_D>Z8bu&TomvELr%lLV zm9;&g8j7Y_ZG(64`PqlD<>im5OS}fr_c3t$2W5{vslh(fC|zyvpnl$-qC3inJYhW8 z_+_#OZ4osISb+AADz4|N?PmN(=R>IYSY5_Tej9Tvq|%7H2&CbDk%x$!`Z}?N zp4=U?66Bdz{OFxUXK&H@KJFyZes!G6Fq6O;V>wjd9%UyVe6iN_b}; z?znFMo)QyXupI8K3{K(hA1u|2H;PeZiff$pku*kIuj*LK^DRg6U1qPq%2QV9T**1+ zhSIjVW=ixdS?C344}va?v>L#_UgpdlS-TevRAdN+<+sS>HaDwkWZ zy{Jw=((m=6#R4o#C5N-RKT*u)GNhCzsxL*wQlfUO7FFDzG$NI{3+^J}u8O0T@HVoV ztJL-|vXAFZ4nL<=wsB%AsICkRk(N(vs3KV6w7mNu=pvQEVi_qUx*E*LUA6EsQ!CVO zYcL%uC@R7>?6(3c3ms_u^73AmO$s)iQS!GT&DjCwj`tUi 
z4!h?K>#s1sUB%T*Bd%lag9YLFmL=C%)Si)%eu}gTxea{Z|l|4*(zJoV=2R= zv}ONpi^+x{&nKg1)L^u(<@_+z?H+^`>yd}%b{rqIWfL!RylLt|sOZPcZ*fWVgF@LH zgY|kJ507K=KT$yk8jy^)9-lcDmull1!7su~8@I+bvTbCZRfF}HH}`*KT#VVl&Dr;J z8J&0D((A@Nb?XTx6$J3dAj+oOpP-9kJ!}gWTI0w|iEjChE2l+2mVo6tY3kvo1yp&i zKFJq(_j@SsOR--cCDu<~Qr#Tpo$PV5#oNU)^vdMIjC{1=%n~U%f=;uxRmPywkg)<| zyjHl|x2-o@u4qZudEt_)Q!cLHVzQi@cnpnTCCZ=Nxg_s$jQ@^eG*ZU-j1_%{b&k)% zpQpCurV|GudwHD6QKY6Uf~D@@;93!FJCU)xrgn_pB1G?iUYJOv9WD(8y2ls=cYJW_ ztBGIToV>PF{PD<#fuy}QQ>21PMaNDjv7*yEhNk(zlA^qvvv)N`q`fmC9!8=XT!@H_ z^S=zks(H;+dN#V8aywX*K%fuF~e-XqxKj1Drj5XwQJx97fyzIGS zU5PJcAG?G@%dV8|eC-U|E7x~iX=8`*!^nx|K!UYT#ZsglcRBW(*|-Gn%MVjkawf}< zmizpr?_Y zj0@vWD%T!V=3+~1pon5yjJ0a1rENi+*9cvu$nZZXgUw>@!W1~Tr|y2n@M@2|wExsa z_uY59k{`T_+E2M1<5<2AEI-y!mXamc=RF;wS(2R(D@$w`sk6V5Gr7I-~pG<-#m5Rlaa z#Hgex?Q^>$?6tcvIKmr6^PjnvDc3ai4mkDrt4l8g=%KHZWrq8%_BI{TFuLip2{|*A zltw-Siopo8zJ;Pyx=n-Dvp1}0a1L=9Axwg4Pq%c|`n-g!Q?s6Ceb>8Z`O{K})2QF1 zZ%wcg4PwQIZs>%uD3<{&|7OhW(kDfUdBO37S7I!hKTg>ySPu=7xNW6K*6KPQ_NlGJ zEnCoaX6n_p7{>_P$f{SWWJ2ohCjFB8=Mny=3HdTm- zD!q-7km6%=%Q>El`}8A&|L(2Yq{Q?^BHk*ks!Y9 z-k)fDl%VJIYF2ryM)8tkP}Lz)(ZYuq(A;)fV7kGqoQnuShi%1y7_e+YDuor&>PA|z zp%b{7u@>7RPQ@))Rk>Hd^T3U$9OyFI9ly0S-QIbQp^bU^BxJ?z;&nN!gRX*`N5Gh` z2`o~tZiCbR3wqLSd%6b}$33?)kzfXEr7sSeo9=&_Zy;LVRg04-RLR3nIWTP8H?`NU zsavmB13|29xWQ=4$X`Epj;(InLR)o>f>u-K-m}Aw=q`W=m@kp~iHh6WU-X=fZ5cM4 zow?cs-Gi2nHtlf3G?d+f{4sU%LYl7vor?ymQ3-ja+CS3o{sC2TgXq#ju#S1^FdDqAL);OglrerP(xaEG!X2ft49pLP*ZA?2{=3GO|u^ulVNQGQxeOL`fhw3*j{bIDY zVc*0_eCo5<=(?*d&)j=cgabqnFI*rvutxKjc|sRz3P;8GDAWeChPAzNTZu}hY-=5* zvBv4o&($HIp&YrGX2|R|L`t$@$*5Stfmi{7W|F2VG0i6H3GM8{yU@=9oAHz=vEDPW zm=>EzqyoTPF@ zZHJufC#%@Z?ec_Ig=hxX|2RPPe={-VQPL2dny1j*;b2{q5I;(OXvsiqE)$%AVy*Fx z*=lRUDqqk!4k>Htk{SQZykkx_c;Pc!3^^~04Elh`P%n%tN~sSXdH~YE3~e?v+P!#c zs>8mj_;oPZ)os?Ef#XTs<53Ts9=W9ZQch8hddWn{?)hr33!u-iNSBwg7jhLGXG@x5 zhDE<_KgeM~v@;IL9HKkK2Obwa!JVXUh46+MRJpqAmj z+@Ob7?|*Ov!%fMFBz9f-R^C+c=fw?{d?y8*$0W=Fhpwbn@t}j%Z>UBO<$M*?x7|x| 
zya6Vl@wv5o#;^%6>mP7PQP-g)hIh?L_rOZ8&ARTg)a_)XO`|bEg1~76Y}&Y;4iKlm zX2cLbV8Prze>E@OP>1|zRT-0MSD^?Cj=kh3ovu!=u(zlR*1-4N*65tC#?Cjox7QGb z8IQKc)F7VUmAB!nJN!y3Tdm`0o3S~)Zd6k8U3P24OGiSk2iW;(ikF?F|K#K43|#Zu zY$FB?{>xzGFRWRQFg_S<5~mseg%NVxK+S8*98*U*n!iS2D}F~0c5Ya%oHXGz7#Uxx ziPuiSogTiaY|&~o7hHEKG##ng^PagD_C@ zET#4%)MizWO2;vWtQoG{=eCAe`K<`^(i2WP>%7!(`&hWJxqNy;mfY(L@MkBowV%+am~ohOSEAi;%3E8{A7wm{ue_&j8OAqe?+@>Ifg&v!LFXA8U4pWV(w<`oiU z=eXittX;E6?v^%!a1;WKph6?rqBtnBbe zC-buPkPQx#TZRJUB-&!kiM%l}EuKnOqwHuhqs6Bw!mXG_!XMt|nkS5mAY)KSR`X%g z7kNEzO+RHr;`r)#4Kd?xAPHgQwhrtq&$ZY zJof!-7yw$&cMacW{(A@c=bQ@^7kT^*?(S-vFg%EfetU=wD91)huP*(o>(IWz)mXm8 z@6%)3(tifApGgLQX-%(t;`=x7?-Ydm`Yq6^c_CKwKuG61T~}lPFg$Fk*<#<@{zB2E z&I4>e))x-@S5e`A0C*2a0JUZ#Jj4Br6}#)-@a_~nwxd6h!gv=T1-@in=lyg0gM#9ot<{Lj8n%`mxGxnY$ z`=p>&2lTvX8XWd(WVo^?Csg+4JN6LB)<|$vXZT5%xjqEXNw1=o;@eB=?4y+C%$oPCJGDJgeb;nHd$S zCdb7;JjbC+hvbpZUk}p+>NTfou>G_B{!p(s$61_S+FkI!1_aJd{@VAufcYuUvP%cm zp`B|c`lF!*hsY)Vr`gBOz(Oh@M%jq^_wVZZUyq#uqEMqRl@EIU8E>>GBU#$dc;c%V z<$y?xsoD|_Pu?go$X^pw$2uXmM6Mp7IyZPgSg9UK>>^zSQp0X=T*=flfZJ3C3At3y z%7dc|W(g$01Ah@>)^i-yevG3Sw*}l?IrhFXc=TRQ7Vatc?>w7Fg#zwPIbPv7$jq4# zQ3c+~cqp?y@zqGDl`s%_I|uYO7>HS^JtHKTELYqL%W$&>Zt?lv5aHPWzBivvel)Z? 
zA60B6Hdo=M@DdHRJoY|YEx(yC5zeNZrN!di$NzwiDdti5pFr&G-wG}ue00hM|Ge7L z%l<=!70IH`I@~wG@ST`2QHBvwb)>xc_J|@=PnHdm65V%gOCJ1uk2RKka4OSH2}qk= zj58G2H`YK@Ub$j{{W+5(WDNkp0I&*m%_uqUSxIlt5W n%!nD|0Qvp@g#I6!(66XI?gy(f^hLJZ& diff --git a/docs/reference/transform/images/transform-rule.png b/docs/reference/transform/images/transform-rule.png index b2c682cd4ce848112a6c9658e42430578202fc46..c43dd6c1be9297ca4ad2e98b7b71370aa34f8424 100644 GIT binary patch literal 83160 zcmeFZcT|&0`!1{~qGFe#RFw@%7wH{AK&6WGDj>Z{M_NQTqEu0uAWftNLZpNaQKU!> zEd)parH2-QP(#QW_CCLLUc|q@^_{ccKUgb^$uoIo=C0R$-Sb*oQ}y&orjv&b9XfsQ z?j4;&hmN8T9Xir}{22I;9}MYx=+I$j2PGx#drC@Iv_0Hx9h_|r9l8VNkPf zopZ)z#jDMV{MCu1CAY4CTF<9P2J4{bIK$4~Fi_`s7s*?dYG$*^s`SK?P^ zbuuzt?h?D!=hoeuyoBKof%jz1ajC)OQNxR)M&pF7kW1XNdQo*vw?r+Ip1~9}(~}fe z=%2Rz(4Nehv`S3poTlgx6AUH6K5_k)aM zFU*w}zWv>RrGBWH`G}R?KIugwOUA=Px3?g{$N;SZBvpHqX0B&npBU2@o8y=2mK@SJ zA2pm1A%r^SCH!Uay{ILmJjFWgNRjxXLQQXDSIA-KT#J#ieEw3f{Sr2Y_iQyZ4)K7` z#}6G2b2xMqd^!yNn84qmLsS`5|MQ=t=!_%(^Z7{o-k|S~?aD)k6b{|Hb6eN%@WKdn zoGF@2^UFf!>xC=lu2$C2`}$(2n!;;pOk$~&PtsA8Xq!$d)6(&tY2rR}_5Sh0R3Yb3 zl#vs{M(>9fxJpw-hETG0!dirY4NrF2+%eCw8W%2GPCBZi$Io3+cy{RUe|(tZ?=Bxz zm3p)9pZ|U>#j%!M8kW(F^G;g|{HZB(n|)J^8sc{|hrP0VE&oHd{}E$_`HM+GQZP{;uHoBD_ZD#UH0 z&jY1)F_p_N3#_{&s~MZ@T|1YRfB|9_y z$fc6R=K;S9=JB#yA#~A;K%j)|e;FtrK|ooQke3;@3~tZNjqtkGE~A7<&ZYXYpIQ^6 zbe%jr82|eHyvtleULJM42c3I`plis!>FOV2coB+NvNN1N@w=(k;L#SgaKe|=lA9jH=82Lu*qIvU{TQ$@#OxJClW61tXOU5S^l!%~&I0q_&*4x=IQ==O zo-@Yr((f|xOu_oZaWs>fe%(P?qvyRz&be!Ut#^5%r3JFPs#v?Wjum?J<;x|BCy$7W zSi$CKR%Xu*Aywki2(LdeNm$?fY4+fg7i&P&?OZ-edtRoB;=O6v@OR~Mj)@1#zmwM+ z-^;l^Vh2x7f&FgP*68vEhvdUfZAmTF5!l`5VHGU~N-z80aab)P1|pTt$Xyv|3#O;y zH+nSq*|K`ze9(?g@!WVyPe zNVv+yb)-fqnS55}l9tSt?#^mRfYovnL(25Fu?aP|HrA&mHC!xU^yzeTqIqD^tQ_+J zdE&pFVa3IBH)L3#X30E3fq%?hp0eY)P(4rhNJzAp7T@B3@dYIx!>7a#J)k4%@*&|T z(7S&4spW0R*o~UW&u+-5RG(q~J*!`Wfhit5{seCA^O?h|g0+`0rldswjW0|Ea0Y2Y znNnf>Fptngq_F04tw_&T4e>FlrU{ncbqZKRsiVd_;^e-_k4H_m3I} z=B;mDq!bCCQX~6Ko)2phQe{8Ch{d?NhMi%x+8FVP>J%H1D91V0%wix4twqRHf^Umu 
z7@MgJ1!y2Up^g%aaB;|X!TK!M$Obd-$%5-xA4-NfMs6FvxloF-?$3MB11#L29YNMQ zm2ZJpuQ`UDow9|Zt=gtJyUTOz)TMLZ!YFT2VnsUBBnh>vjm^w1msTIM@1`fLOeK=X z(HQd>;2&p-gr5BPvcRU=!s6+L3Vo%ng(ry?-n(X7%8(m~LQ_=0pb^gCDQc7Cy#t?87CN!c^3+Zqgq#!S2*{?TV;9w9%H z+KS!%s}d(h(YN<+E|XG*Ph0;b1BXT_LVb)JjY{~=Iy#9I`HDG+|21)Da)>RD9G=O=jnAen9F% ziaCB{)wjkb$osC>`~YIuJI5#6lx%7sE59poO;d8~@3$2aUbEeK_yl7B% zmTs|M_`7jOxsWtNxTQk24Pn1uKm3-n6V=#;WCQd()9WAx9*|B}hrb3~kt5mD(+%6D zLwRNH6V3Dkxn(z97b=0bLwlPiJ9w3LR=i#ntb7t4VqBj_H%N9x1x*x|qS~Wwwz~&s1&HlqN86A94(TyR_C;u*-sVt$4^9V!S4Oue97e zVB~c1PnDFW6t_CFAUlpi4Gnqm2A4QQTfz_Xfnx=~*>(}_4COG#n8)yk1A4uA(5_Tz zjQ3qwP>PGb|0iQ9KZSM2$3gA#OB6fUc}MS5(p&+&i7Ul5o)$hQRn%Y4>{=^Hi&-xc z-c++}y=<3e=HX?QUtQp;1rJ}UZFh~Wx5FfHQYX={2Kt%C1rb~w?=wwRpa-xmKorD9 zb1|e3FV{Wmc+)g#%IP-njT0sNJ;jyl-5)I|J3Fg__kFDWSzTvRefX-7{n3a4Q;syh zomickEMNF2=LQGb9yPTtrL#I5KI+#8b0v41>3(JQ(jC9<-+Tf1IR#_{3|- zhS7kws+VRmMsoV^xA5>$k9n8kIVi6|fpBNC5vGwQ(R3$l(7%A+D(AgzTWX?WC2vu! zlt9f+o8y|U+1BPnjCH&om)T5GR!oOV5o;`#Jhre~J`y|w4P!{?t5_NhF7Tf<@YL_| zMAc+G2`movL`e#-4|~Qm{Cxtsv!dpcZqO)-F}GOwg{#<*+nLQW3@mO6UlPPLS8rFD zV(dHojiCY7$$2UJV-vcI_C1-#l*32D5!@9pVDUINMAxD(YDtKlk$OUz9W37bUPZIJ z^{Cb4K1bhD?L@hvU!43+YL~Neeinr_ODf8$mDw0BB)go9?cxAGlofMDeG_T~B_c1E#;v{WO%y(Z{70{QYqF)v|`?{90D;r`k?h_wcHbh)PSWf6Pr z)Ul5>GfUDXm$eLFKDLPhX5;-2HQQ`S3k-}qKgS(OE4-A=7WqSzJ6cKol!z4o)|V|Wx1<-A3y!o)yCK<*kRWXAwG^LPVngZjccqOMRtOo% z=PXq`T)Y)u?t*Rp;Mw)w0=6doSIVgj+*D>5JHOFXJ!!#+SG&Of*I^Q+e)v^Al(bY& z8if|vn5F5+C$3pV=^Ckdx&3t#y+8d6;XYY580+Xjr)OpZl1T4G*SA3qC4Y0AkuU6W zA`@Pmz5nusL)@^bXQ$rGRss4_vEJGbor{v69$1;xbiE~|_>Mk>rs){6$p;61ihXug z=9l&-0}ZwK6BoJShDFg>i@@dVw==F)P1Bqk*{t{!zxl#a{T_@}-|;&3;7uXSk`%AQ zkFIT^=aGUB0$~**8~EpwANL!S9|j@S@A7Rs;+|)Uq*&*>{eAkox!Q+O8}x#S^~2=B zN3$3uwbAJa!8(=*zH+Zt^26a}Z&vRa6sz1$*e0_oY1wWbKaOluJ= zNmfFXyF@@}VRLmku@9s4UY^RAJt?4J{$zKHu{jljtSJ_d)>z9il3^|l+#vhf!0YA6 zrt1+2RSgNV=C)Mm^ZP=_kYguwo%0fa@{*FkVcu{u17VYkG% zcg24m#w_a}xBCS0LLh+^S7g@beS2l_aUbk{@HbfI#^|M%#N@b4Q%tgiyHynwn&)3> zpzG^+YF}C*5~9$@DHYY71PrNcQQgwZx1tuoTYBzO^oa>L3+I+^F)~co({=n+IvLX- 
z)q@TX>tG-U^-inVvP~JNLKdaU2I!xq2vpC;tex!d6>r}&sD>V`oF2#+xplw45_4{_ zO8ayK3B&q%(;QY_7May(uEF|6-{Ex)jB_qtSHEOor-6)Q(wo_J7#d0*$!+Sa+9zS% z{?x`^dF9i|1(?fUa2qkCiEgDck;;r_pGIW^XHtFRWy(nlrQ&{*CarD3BSJGDJmX{@ zzl#!&WG+JHOzys1m>q*C$Pl04AMqhva#PD=VCA@L2}ZVfTwZM;nG!V7xnuQiN8lzU zaCwwO;q`|JYszyb7(2FpBpGiv2a}KW5B)7zKWhTsZ)C`LBp2Pewg3I3sE5lZZGC6E zulkS{1-!+6R)YK_Qwf>BnzYj;Z|3e@OYW-KLK`E2a{0hq`l>Sx;Jss^0UIM5);pWk zs)Fc^?I2RH(MtEznfKCZ!E+Yw!}-@+N`M(6COe^T*=ys6BnsFJil#&kE9b8O94T=a zeXZPg30E~4RKGTW-GeUPl7KExy#+yL*4vTxpIK-2M^@T;8OkxbYNv;MQefjRnca$p za~#{#9!$|fm(So9g>^cf7U0lwSD6MnX< zr!L^sUPGx zwlv^|1A3a0Ymqo#g3j1pdae?A={&_3YcX+_(Od`PU^8UUx@2=$=fpk@KbLn++jr%` z)R*a6za>e#W?I2oU9U0&G)a>qD95J^)q0)P0s$=%$nEjTRzwiL?sk?Ba-EST{`dzS zC|W>&KWy}wI6O~@sv*?FA7>=EK8YEDm{g)d8O`%Pn|fXi)#i4_Nibg(>r)q=@_}rO z_`2`qlZ%kLtm&WV0kzH?$+5syed(g@yA-oy*9Wolvd_2Mg^{n%4(hP?~dtS9#Jy3dRe1pGCJlT<1-9IRfwy`JdM zQoA~^_*TGkmzr^ExA*V!*<8*y`WAHya^8*v_9 z_}l9zDnz8^W$-kF^YXD{?7kXTwHLRrT&{Txw-fV`ocsRGy2|tQbG9gJOip2_?@P zcr-end&*yKuG)vQ27`7b?F*!yDJW6%Z~;>?X0_Tm{yvv2Mr0Vu!WpG4Jw40O{s2uk ze^>bZsMmzW9DN{K&*)_T5~XDXt&%?#gXE#j@7;ZwATsCT*mgNLJ((G%pYAypv{7dq z=2E--(tTN(v4429`*aPV+{tt^Y?aC1IEyviyh(QQ#}Q^z%F~%tSv@kQn^q4Rpv20e zwNJs1&RxjM)8-cysk^`R{oC0_5O3)j)Xd{5n&%3d+6Cn>e{u9fNAEImdTp3NBFG!q zin(cjK{xJ~$gO zw~SA*Z@Pc7k*#xExbw|$_gnVjM5?`LQItqjk(G=3OC4@Jko(C;<_Sf%I8cmR^!)DMH#9^D4FZy@&WpJ6If6p2x@sitqX@vs>Y_6FyjJOR0PtW*CX`!`y1S;%oj9gqrobaneK_suy{7I zR3Vr8=-5OPepb3qSbg~tB1D|e93n$lCz@j-#9W5&q&p9EnAiDF*_Nuqqct`5oYSd} zxux_@uI-0n?X$BzSsX!kA-h}C5pqL(SyKWRxzt|)zuStKArb0hG*hLfiV{`2>1pHd zn^8zVzgc&Gxh*s~54Rrt?3Sezw%&tq!px4;_!7I+dp9PGt+sdqd*Z{xe7%PX8W zw_lz1axWeWg*^SOpu*6DPuVb>gT`Oh^*%TfF9=EtJ8}bm01$7{9!IyGf=ca=0l-@T z!2`&0FuT*$gYW*H`fo>@V|uIyN2b7ZJjfWt!76D+2PlQ!dI62zpF!>wNmsyh@nbE< z2f*1Y+O~%?jo;taQ{At?U|Y1ez;tZ{YyRItZLs(orO&is9U0Ky-~up`7YL@C+9+n& zpE>Oa-aw8vT>L~%4;JBRt@z11E%ya)ztZ$Ixv}N$9=B-AKkH6 z26jxpe<=C>K@-OYtkqK%wBmu0?r=~Kgyofr#ve4MGGM+H325lS5qeM$G=5;`#PhGx z{zsId@D$89<*rM6aD?~5IVj>|*MRPURfVh^2d-lxkKy2m{$E#gos?5y`3GIm)Op~f 
zS|0Ep9D&_c$QuZkGI{t1JIevwVN90sAGSRf7-xF#@|-_dbX8!{*;Em~7Z8sgFS`ad zmUqrs<6sf?MhuwmzXt_`=f4N_zm*~5zZ&(wO-0=Q#Tr%SmhTd@R5de^lj7DnvI{Du zOpF#@x4l4h+um~zWKtnK@rjZ&_DbXdpQD{|@Te%1r zHm_skcK><_=oZyox73L51BwhiqtIDui2~@ZRDkWu>~6jj@&N>GDG&=mI$X+mu=$d2`~781WOp%1cDn)WCM zkfb$vwDVtV7bx}WxcTjr=vl-c2my_IBn!?O6W2O^vEe5#46oBx&zX!uC=mDl zo|cv{_8{H)IJ$&XpSgR*GXx*OJs{g+e1pWUYX~9!q!}Na(1d#LR?U34Oj;^6F!f#& zb^oah;bTc>(5b`hY=!TEPX#VoyfL!tek@Ok?#lr4!wgqo-F#omn?7+ZbF*+aGbn(G z9dHH71$&25wP_VW9~mRDlC&AT`K!U-jFvrc^MP~8kUoN{ct?_ZP--i#Z(@|RgmQzf z1&rUzWss#1g}b8nO5897e!VwyvXk8$6IeGSOy7bDUp{u2hVgWNZ-_7MsR6?wHpYoy zR!)P($GitmQsbJoQ08#Ffw}%6<^*cJ+%$g`OWC?2BdBRosJUJ{fcO|3CWqn;OquHZ z*ZCHzm#Udhwu>^322dcyUv^eo?p#X%sG$%xxI3 z(wcj6K0SEbXrXM?HFS+W!9rlyAIrZy$U%vGrzKVYnY>HG7P(iZZK$R`=*V;E^OAgz zGaC72;wt(zrOAaKe#o;2N{9{lWewVVuGxixpTdvG#$7Mm|F} z<1Or}ck1hjjpC#JYB8ap#k zG3pp$2ny)5Bst1QH_NS9Rf?XWCOmA^bAz>~%5w%+Zc{?Y_Xe~RiS^BQ9`oX3D-ibu zAypN?DLFY~xUVSkkM`O#?Gb7&V9W!S&TGjm6E;z+f|JB(`QGs?(y@ znEHIWhW{^OA4(T1?mSqbtb{Q09mzkJmwmB(#An_y6M*|B^?Qt%Vxo?k3e!kJ+gbUY zH)Hf}^;`OuTNOh=*#am zgszE<7n)QQg|3y|Y+-q4N8L$Cx@oCcyE5mrh5m<+1qnT^%uwIDWRjZ$qi3}63uf`(d>~nEmmVNzn_lPhD&ChoBhMZ#@{ynVNnh!w^G>Jh!qQdL z*_y;;2_uLDp&3jgV_Xr@AA({1aN zbvcs$#MwcQg2m*?_j~ZX=a_nX%vObGOSJ%G9Vp7@M@g8s`?m8DY*~U^;)hty@o42V{i`onY*SL(YV+pSWx7Rz( zd=|aMBV{2K0mOwN4)A!Y-^!R4y=$%(!=d%Lq0-N;AqU(@2L33Il&)H2?uS4{88T70 z2Tplr42|HgzU%yX*MC{JTs|H{A*n10LVk=7iMQ{S`*$3(C_aQy*`;mLV)2@oG=oYe zAOwv$npJ>;S&K4bs)bX{=|9*n4((z%WO~as$?|3898`YFpnwd@+G0hu<0Uxza;S9g z-_%&Q3KyK$gtwl!YS@{m^xNx8BhtVTM-Mu-cNUmQBTFd3q~-~Vzr=L9Tm25#AE>b! 
zeBE3mpTS11dfU=k9B_-JJ#%4c%Fe3tNnDQay$vGTK*@@jB-G=b@IFg5hOMVQ zXo4o7uV$p^H}LoPIr}A$B2AAfEyWz%xuFy4l%Iy3(T%6~ft3G#G0y~K6csUlAXFZ8 z1i6)C?BD^f56~@FkZg>|tshKK&gF3eSWemM$R8q&Y5<9OlLpleD1t&p0?1GpUjN}R zj?Mu95WQ&qTP0vm+Cz-Nl8yg5=l}sG_WzGXDUgg!j5lAtzIeS^Fsr#K?}4>-N9D4R zrD9_vG3E~$usxN_aCsC!)9-|>FMjmQUGK^EB-!`nXk?kW`5WN4XfwGq2`M*P)~>53 zUE&C4V&w>CV~xj}O}){f`F+1LmZ^1qh^$7YT1~%~QpT3YCl2V|$rE!VZf?46es6eH z7R`!PZaK3EkYFDmSJ?1{%4P`B?118FLqbG;IN&Nz2>C2*UJ~{(jl3)@=j*}rRm=$^ zP!;R628V624~;7!*i|2A^#wDAunH031AUXeS%wtyV)ghH*t&A)%;Xa#sfH4bGx)S8=+^A(e zo@miL$t2@caoOsn04vYOWqzM^RL{`41_ET06LW%FJH=*NlL`^;_m}AN-&7-yE=1(P z?KWo}pLh4S12K6$6FeWE@{8$`Bvam8DJc`KTX`>~`Ui9)jS9cm5cl&F{-jFu2N6fu z(@_U90jm7c5&z8>RPs}tY_?w}|KWH8se4}`iiX6|I``gdw)q(QJH%!h%W2QQ>0Ehl zyQzB+2uQ9_XyI#d7_-!&i%0n+Z?Iy5eEDzbGH=bdEJ~Fz=AlRG4ys^si~=}TPw{u) z2~?`+j;8VBJTb16x1#aUD*2+dEvfriBf$P8X;N+RT~A+B&&A$ow=sE~mylzv(NvIb z^YobRfo0fV2nkHsn|tzOlvganWoY*MYdQ{CF{FsY$REsHr@@uzx304uz?Z3p+$_ysUC zqQnxhgLW%U512uH&G>^Bvg{bJbxyN3E`I>!3&4DZA9xOe^7f~JZ5rrqAN~XUuMg%k zlGOV{7YEZG0+yMGi2Y4afx#^V^Obl&f1_IN6&geVVr8r(=*0261$d@lv{%xY(!G3e z#Oa*<7!y-|)D->Z&oK6hDDlxqkYR}g?>>D@T5x!PS-E)Sw=v?W|JoqJ^3ndCB@&)n21+KPX{+FR#0T+wkG>#M?qk` z@?9yJv{)^_Pj>8&9?l%5(+2fZGVc5y{h~vBOy;3Pb%}2bv$((+>7ArYk}HXdN_I(^ zY8@gIk;StgX&K8A)1YYYCpqf33IoxaE6^rD!XxaZzMbAbp#?>zyt*X7fm#Nh*Mr1p z^f+@CSLF#?O1o)kwX^zF)dMT-yA(&(H_uxicB1eTOSNGG9FO)nJLAx2+Ab$Y0=vJy zQ(<=byj$(QRF$jsbGB`i{FR2Co(f@;XeOm8^E1h`J4vw|)cJIMCJ+MGe-R+^%m!vb zF0NbfiZMD`zMJfq_pSFzHxISCZZ}U&&>R!lef=~daL9I^pO#~z*Ix7hFxs8GCrbGh zxy7)F<|x7b&)tf&bP4 z7K4;QYpv$o^k9Zkl$^;zJxL%|#j9?6ZhmY4d$l}>GB+Fp>BxKN|U4tX&NKw(-x$R%yVm#SRDBJUHA5~){OXj;0rNx8J7CKg~ zS2J5LBo!}z4P&Pl8}pyHD4p+&HL-BaZ?Yz@VLJH@$k#O-e-VXIP0x0ZSOX;E4m?e^ z1VhdEy^}6k?w5ICzq56+0}-U=5s9}7u3A7?NegwVg%zVxv2rgV6B-mo)Wp zyf5lxmGZjbBi&cK2$Jb`v(?3gh`ri(Dmim;rn4~!)>rt9>?~vrhB3xY&a#yOjUq_9uj=*TqQ6IC>jI9a^WApz) zSLdXE3r035m7CvZn7eC%&?#?o1ic~?d`C=Nw2~KZA^Gc;3`KPVh&?Ty?EG5n^MrSy z)YsEJ^bwvQpfxkp^0x5D>n-arB=qy4$TOjpQwi9K3gig)8To~bTS!={+t}s1NwyZC<6q$Yrg-vgFmmiGdP&}5(SqV{ 
zXZmEtWnkV36ZCIs7`k&@)h6?Y6?%QfV0Sr&#ItsQB`a6Q?)IpLcaQ_uKQ=(H{_If+H0XF*!BN&QS3 zcRt|>H4or}huEBDVz;)b2Rt>u`wLTQpHVIr-<_MJn z$cXh)rO=Q){Gq07fpJCxRrOkFtN|~q%qp`8z;4%(e{btMeSeJkITcAN=Qr&{PPzqpqc$324PK;Vubr+RWgt^b1Z!#r{Pj ztGXv1^{%b30np*`&%v@cilxvH*=+8z;iHh&AQMDdqIoa^fHPu+ zrR&KL*3dsisLF8b_-$}j&{3zVgj@ur&6|C0Uo;xhOb;q>uLz2?{FfNb{4`xrb5720 zNE%mr<7_D%F(;*_XTMq3ES5t>OQxDrTs!NSl6gK9_`(b8g+1U@Mg>tNB)sz+G(-)h>RNq%e&f+S5Fyi0h7ri~>cu9AO@~Y*U5t?<)tbTQ|P>_0_f8JZn#nLDP~6DWDxeR^QOl;};3En>4+?u92)E9`ykmv|(0? ziJ8gwpA+H7Yz)26iJ+5wYD|LPjTZyaC-!!9%KzL2@P86z1>wD;?qK!Yw4`Z?mUcDa z)mBRcfaPhc7+IgrsQ#*j5?X5jS++)};HdF>^~52Bmt}#|SkB&WI`7Q0w`cH}wGd zoHC$4qm^md>DCI1jT)5VqkX+HmCRSmtJgbQM7MyZ&znM#K^L|`K)g`hfZt%|JBv(> z!(%_gSd+k{lK6nfyzrfrLA&&^se)HuUxd*mv={#(1#LvSRE(4Wp_l3t?}?^}76Ql; z(j!i$Ycm=84%@kb6uGq!_b^%zsA~pbGfi*HMs42bq<%?2Qf|9SlHb*{`{IgKn|OHP zaRCYILv}HW842rJQj|AkNxbYqM2Fzwo?w*JYMdkJ($?NSh8-Q+SYDZG=le^*&f$CM z!>)NvCrbRPdB5`i*=G=VNAW)TWc)s;Vi@XmDc(Fl4Wfr^(O+Hh^dguK5|tS$L7aLk zbPY-igg~#%p)Y4H2j}5W7K$3a&*8njMzC;!zUdFv!bmR^0WTER)pG?@8i}`d+v#<%)KK zRkz)b<*L1F)1C2E&vyzGN;&R_wc&rbj#M{C3U?VU*6!R7N>ANcp4v_w*yY6p<%R9b zQJ5@14`25trf`t=)1y4cM-S3p=KSE&Fu&)F~QO+PgV*-w(f&Y2VSJ%48fzF9;x3F{Z6UTD$@*F2FgOA)qO{4_1Z^CEJ>b) z)v)Q6bOGcI@p0JndYO5f2(=Bj^Zl&zVU(M-!}*{s6WOp0D0t(msDhZs?F4mUqAFhg1N6;)FwK_Yi*0$xOm9^4dMn&~N`kqK@?!(PPROrq@MX$LQLsg@g=nypi$`a2yJ9iu$Wh(BZ`BTNQxVOE_&bt(H zr952<{V3cxAX8sFFn{trwmfxb&a-nq*X^A~eq2eqmpM5tmD?;UlhlreoUSW67(>LX zh>%k&zVPO}rizwX*>o(G(sxnu_g+nOF8{teq`J;ruu#_LQ$_I49#URq|D}bwkJ-Vp z`cCbxlgL1Ki+teg{GYdRC@py{wl$G#LWcLE)YI`0IuWs!xAoNe82hhYDDSAo3o11KjJaNLVXg}!r-%B*>kSpO7c3kG1;j|e+E7M z-fl^1Ln|hAY8HuiA1m6A*PjN&K~bDD6eU4gkf)p zqfHCm9@-1Na<`e*B!z&(`b@6T{gz19i!`j8q-|XT z30q*gc=^t>Sy;NHoG=YfI~^=RBdgmAwiU^c94uI~@X_B)UPJDUpc|6x zx>tmdGZiHHjPB(Tb^dFSL|k^sd{Ka?qhAU#J)dmmn9o`Auy2gW3p$3oe?(|GQA9VN zYwjx(gy6XV^m1)mglRrXe5}okpUh53ajoV`L3<<^8&CjT@8sfCc-o;~=VWxR#kLq) z7`1b~OJcPaH?_Mqc=7ZZw10=B-A{ee+Pr~XUILfp_rxK)^mRhj)Vw1eDeF)C9-4v= zaHO>71m2U8i5vsrwP=K8*~bYE%xEUlc_f(_!BJA>r1OACpFG7!$YDk0HiT3fh1o 
z&7YXRiW9lDC_fY#dsco0ZrqZcVfpPeCSou9u5hy#3sFqIlX=xU#4nI>rX-rK_l(b$2pfk&;<4H!>5mP%Ue?p zi^xFP;QbNy7ND5sbgOO|ag06P7v4Z1Vg1nYnihNeKR z&&X|lm%vX7d%n9^yWITgfu|nt_SQ`LM1k|)x;*(KdKNQ<{-*g;O$;V~k(vjoszwO) zq+nbyY0QZ=`FXh~DrLub!UyY`8UQXU2!VMioitsY0^!>$LF7WZ1^-UY2|7)wt;ot> zAJwcA%{C%II+^#`tw))WoA2z6TfDQNPIxJCXIU%MnyI=QhMhFJ!x>{>)yK>_zV1sHZ<yO`wOEKrQkVb=KBF;~;)3m-7GM)>_~O7#3Xl%Jc%3X)`7fxnqRf8@>v(hWct zvfAWNx8NY27y!*Fm`O(s_|v75)icF7kM=u{FY@OYJYx!9(mKWXVSS%;WErU2F9KZ)sB(~wmA@N^wkr4Sc^}x-aEmaqL#d~{?e+%5| zbW1=9HZNaKV~}3@Y_?9Wo$?6U>QPO9d>Hp4l(0>I#`#ObNnSn^38~QFZ($I2&;b(- z_C$C(Eug5Oe!BJzm))Rymh}Ux`2|~2<(y@npP#ED`Y|P zwpJHCu@A9(wEd50(WKbIyR)_VoYG!W-zjGDIApY78Ne=+itzbekB2?}28O{^gVr%-5lbn(&2&7bKKRgL zR6Whq8I7x&+XY6w>bam@!1%>!r-6sB=L_KVjvqYQmaEQ(<_C$7>_9kEp0+m4v~&og zbO({3mfXUS;&vfMX4|hQ#;tNWq4H;Q!xFAR#&e@J=pjF~%IQ?HaGlh9F8S{8)G zYx!eB`=qX0=IF3k4A#Ehw-}&?P5j~(jNC_|#(RK?H^RJKjjzfN=ik0o{CM32>;^yO z$ycLdz`B4|6*$kQO8a)HyiL7GB2@TVGJJ=|=boTjADZ!XZ=z?kD~;_k_BL>>vTiwp zD5uB1DRiyfoilVnw~o;i_?v>(3p_hTo5%)b6Nj~fYaBp>v|K2nv)9h-WATLG zdAhk$rUU}E;5PBMEy!){sQzVxwMC!o^hX-{0BYH69K4w z_FCTh0SNWDNHwSe^t?9Pmqq*|aT&b;5Q`tk8df?m27XQvp>o~l05pFnL9QJ?7_3v?cQ?`Dt)t20T?8no&OB*m${+aa?CXdF z^S51ngYuB_PL5lQ%j&}GsYaYaOHoeBI}$4|$&dP?b1>f~s((MbknNM7PF=T=%%3bx z-A==@#5jRfkk(x|IFs>H)L7t77D)gIB~0-){$gap2m`@m!&*y$N zz9V7frT$@7uRGk2NAy5N^>|1edgs?f9{6VOv%I*eG8gQM-L86SAD5G9gyu(`B(d?_ z2URpMjR9_lqb4oU{E%a>!#4$0IewGq5Pja8px(StYKClVJn`;hu958$i!iD`-%1AD z!js@Xp7>xQc=6MTs%Qsnk%rgY;8iIf`$XD79?rwCq+Cs<-=_<|cu783F_KmKYV%{u z;aMST>jwBcwQ>h(yvw()R34{uy2^h$;()!vk)JzS@`Kf+@Q%P)pgp zyRcW>R#sQn?n7EBA_Bju#N2b3t-SP(9M%53{KU|?Z#y@??Fj7;@cz-!Hr_XK?eU`w z&SOO`>+6q5v;Ewpf*)F5dZu~$qb;{xem@Lwu#U84;GQWb@uq`>^utsdfI;rWWM{As z6#Um(T>;jL+B+xbfEXSZi2~Nj;e*+Ml958jb6~CL```bmbS(m`RgSw4{r-y79tqN)}|5yC}x8ecL z-G6oJUj_O9%24PQeQA7saU!CgY}Jue9FP6Z&|k97KWk1~A!Xm4A+Z%Fd~*J@QRUS1 zd=@Cvo|bGX{>`W<@LsqCT6b%IjXN#3Ik`1=yVXbaST{Fz2zOqRonBy@hu5g^A#pl; zL{2WoDxwzTwU*pGGe`m><;^_wBih89qnOzfO@m!NJ$jNZ>-*YVcE;HQ>0$91ICXi7 
z%&j#4?f3a5cT;l@So`o3;PN3x_oj5WaBx$Y9+vzqhT|GJTidDJsHW*e3kyqW=KyS2 zsIR(rqy6ymG?vg3DeU!HL)^SV-}}h1;CO6;BGO9`FK2xJ^4<+P>)9(4G4dzS#`jyQ zw0?I5U*e9AKHYe+xoWWse=@kU+R4f*fK>0ewW=<(fF+D?FfwX~`M46W=IBztWN;^Hl`xT{#C(gTeu!MQp`)E{tQ??=(V%t)szhqv51>4G6EU z-xq>!%vj-PbxT=(=c=uj?lWd*8-*!PCB@$^4$r&A^l3_cY&EAgvsw=2Aai#rWhis0 zgRC#SiP5Am;StDKXtB1PFBKT1bDk)acf&0xe&F&)>+T<4d&*sEK$nFG63zW3x6fWW2dYVT3Tu1a_HMfh4`WMXA;O3$^jY9C8FUwUuQ#BNk4V~l zr)3Q=_^NrpW~nW1YvV=aZDbUPw*BT_^A3TFUV}gWYMWWdI_vXlw~M=DQY&7=OHD!F zgM8HCBQz~0L-WbJd`Pcg-$3HB-7;vRXrZF*NC{{XVqG1Yv6wE;e&Ev1ml}1eV0n1< z^TK9K>8jk;LWHTTb*X`tMIe3+=K+$fMn&4LL=(0hG^VG*jWRpnl3?iZTGkXgp=CztpUn6}X>yD9JU!R3*$}f%rdU&8|77VS6!veI^E*1*9Ic4^PIq_p0s1kzAp%VEj2m(>4C9V{p%Tb^!zyB%I1TQh4FAz(;Ex9_lKpE!9&_6b2^ z>67O~A|SYg^6fyGpd;%Q0j1CIq(Mt;=fvv;y)H(J_quMoJJL(RSH!S>b%U-c8mlSe z;7%Pn-^Uz3Ryc!Dis#V+L|`QVlkXeAl$`?xeB%^X%vRABd*J&&M5 zcxfYdk9b&`ATB}+Kb?0bE!04Iz@^0N;2OYU5?jDbaKQ)L-|1)eR3V@zM|pb_!qYB* zXrzM=W-5^k{W7;aTIwiYTdFSeJEakSSyQ^G9bK^HQK6-=wyRyT20!1lr1^1?meuQQb|*X9j*Kh25yBkj^4O+x6}yXp@x4;NE? 
zTLP%zt~F~6_`%FN770HE^8lAZytyRCaptUi@|75wm5jI>X~l1w)i}%(-j~+Q`qmJohTABZ-%;mRHN$>o> z*n7*cDBHb#R1gf1Fc9emX(gpWL_kVG8U`h$m1c%UMMXloLAr+;kQ@+cn4ud+siB4e zhR%Jn{%gJOvmQA1mwoL0VIR*I4s@9NzUtTK@4N=+?SRy6!ws*Q7h&9EPcGpRkjwt% zhkxDO-tQ4^`8FT<7g)8SItnqzmHpz{flA_o{esrv>JXbHuJ0{h?v|2TBP%HtL!-B8 zL^>f1sukcQ?IO)SB0p<#yVh>8&5d8s!Vg5?X>h&OJADV=jiHzNbuJ%V5iQ2!hEf<) zQ`5p@c=yazBz!M{ViA1uLAPgy=^$6}&OiBf&qp_=WC7QjNY^RX)5uE7%F-QIX&AG@m(D`{@~p=nHZ@TM%SzW6 z{Vw7E^#{3;zj5uZxM<;OUq*MwjglAj1{NIdBWqGVa>7y`y3oXAzl1g0Fg`l*gWg%B zK~ug$aI9eykCYp;H3CUhKV6|XsgTUK#g+dtegd=1U8CQuQe`*AXIN<`I-u=Ic6o>x zJmr&d`}u%(vR2WA0%oq9pWUz^*(N1u83UHn2oJK$6{$M!J4I}3f;`tNakYqgL{-}NPI@-MdTrH%yi_< zJ9UFRCFJ*)>fYRD_pvA36zZL)c{z~c!Umj|hCG{Sv-%x?>kvyo>gr*A44!tbS%Oi^ z=)?aoctsjBL7Ol@CATaUor5|d6PDbCBX{rQRtq$b%HM7%VZ7ictCUA~x~&HCSR0*KAE zV!z^S>zp2C!*b&dP2`Za6lP9hOADtVl~w0GiiK&mXceW6fAkgEr|YtB1&(5}in^3A z4x)44WEi|4Y&)VH?2L3O?h8p*49WppBqYCo@4>+>H6`JcOU%M!coM{8P&U~u4DT>V zY7%_3WOT2I<^|=;!=4*`t3$PpmHt9VkzHEDqW$^KeSQwZ46rLwrLNMkcH2@}P3;@{ zBk6AE$fd+$*?ben`t4AlsAH=G6@f;M%Kb&!|0VqXk`(trg>hOZU_r7`PKqO$T?r1_ zsr6M0or*WFQOyyrp293*R>g)Z2Es@q2UASi5-aVy7D=)#7T5c`H9Ky#6s9HA|B$sr z&$F6z^?tbK!Y78xl$v(Kj-k7&7a)=mI~jbrA_qew_4qcJanOCb@v7+YoFNfz_ra+0 zgk)P@+TUaCIq_yG)(!emvS>@3>O?3h#dddj6-?y~s&*}kzG{+H=IDXqU0x6m@!caX z;=A;QI_@3)7Ms2DVI32*t>dv9U;bMHfg>nKo@;rRqc<(;%8%hfV}7*A?pg=M;!T%( zS=nE5bi(5<$(45Nj1StLsEw!d4|~~i+}YC-%40ao-4k3QRr>Xwx`?Z{o9cJ32cp0*Qfk61E)=<{h2Ok zvWVB*pQ~B!(23;Z%U4PF2nV-bd6ric6^~bYw~?+)>X$dfCOXzMRjZ3ipU$X2y-{=_ z(pX`Yk>%$a$Nc~eFZ+_St_;Ty5&MH1eOBi8a-sBxf0wekWLYn-q(PbbN>~CqDV0(Q zSDqLbDmo-k|Hoa+%CMPk`~6)s0cm3qs2pKX_Z)4otF{t)B}pjGJ-tIvDZ75vi;}c= z(^YSw5<|tEG%OTMm88`F;{%^Z+ERZpU6xkcuun^M9}NQ9OWytHQk5oUJ5kd zmw-9)M&=;O@Sg{!(lI4VRD64CH5kdPK-A?nFtC!7itgHVUJxT2#0{_1vE$rHX{S7n zdv>#|!fcDC&8)41=Bp^UEhZsUrB{s!()PvF>iGI7{Ka<;&UL-Miag zRQMzM%%)*Fcy!z26a7UhklQ5%Jg1mRixkiAM|%TaD-o7f!Vxnqp}d{H4b|0mT(>pp zO!MWMk?;eAi(bB65qmx{)W{Qt@a`Yon4z><}qYYgFBi$H$ zQ$pwBOd3a$tcy9`aO`4>1^tP~*vV|{u?Q@79wsTM#gGzc(X{jW4^VeQnQQ4)}cEA3;~BrIeEjQ 
z^j7ND77GTJRZj?6-=(-6J)9~p!k#bHXY;%MW@SJmoz?W6l)NQX ze8RUWQ+TU(e&|b|+64fue%mQIV0)9n>PXe2ITmZzCQs0QO-4Q1#iue)tKl29aQ-13 z$K}8IXLtkw9BKYAykl^uHSDtQQ%4^+GtbQpJSWSH1AXb2%?GYoyBJYDJPzb-SgbB; zS~S=SDs=e6?F#kYXnlJ9!s?bTDaR4K#Fd!4BwOmYkS8g_5WUUu!6f)`!PjwQ1{5=q z!arg%FvW=^&9#1jal36-Xv8_YZ>Gv$tE9dk8h59IqPx%`FE6j`-2l7T+uvz}59A*@ z*F5q=)3lV^8N2*mU1$riq<$iSragT(RCDj=LXWa#UqHPk%to6NbTnF4pE6$>a!y-w z=*kA!t?fDH2&aO-KPJq2zkh?_%^y5uj@RCXRguCCm-~5F7KK)Ums$#Pt@%Z9=|6rJ z%NK7?WstHHHSi}9j7)0f*%BwoUP>o`bg!${7u&`6>yy41vQ=lbpQ>!8rZy}yf5)j? z=@Po^#=J|MWm{is`k_B^exrPG(qc&UId{$y#(g@HO3<=XwR|&YN0Vlw{{d9A6EZ8a z&Xw;pZ#J7IMRtcrFPz>wh0%`X?;c+|e$6$%D__)Et;rGPXnpmretIj4&e)!%C4{H= zs#fa$lhg^$2Z ztMW)voaB6znXxd&3v^B#03*&U@0VskIqbp6uc^@|VQPmA{q$D+2lVf&cl0(Kk+#Bkc19qxX|-Kl^HY_HVfh zzp4N%%#VsY$cw&9E&$B@aW++OPfk z*d6&}i%~I@_FG_wbwpAU7pHX)o0Rd~r<1Vv7c5V>75@GvIyNrO>lP&q%jfH*nv4&1 z0F-L1)vm-CAzDx_mxbeyY4V3J_~7PPEjE00%2$cGs(La!{l@t9AMKCMB3$BOZrHve`eNbP1^;B^KRD*>cBlG)yqcCj{zmre85VG% zLuTDl?$vXndJ^P^5U+GM`9dohXR7_<$wq)?iLB9+m(q5+r;_G1ACSF8QY`fEf1m zb`IV=x5wnNjoQG6#ZH>{&YdAfKUpC_8wG#V$qv0}JjF!hFP)zrVF&+lR!ZGy?JA?{ON-6{La55C#);RdEP26hkXj!8P?1Bx(v5s6GGnyKha! 
z_BTJcD|8P8Sv==uXP+${=qC;GzWn?mb!I@Ys;91cLkypTU{coZY)CQ99YZu!2ZdKnzj!%y8e*jFXt z&Tvf&Oiy!uE}!WC^{pIP)1*<{e2_Dy8L+{>TY~SBV=~+x@fz=}N!&r$H}?((T$PyOWR4}3F6L|s~|x+L_8^#glsgZWBG3{vRJ&a2=svQr9H<{1;x z=)Y$LF8SwDD5*)=Wn;weEA|UM)=&s$efI6=^_#OdcjScaKLh@QB~ia~e8zQw2mU@oHeEj|&7<$vC%w4F5gF3}8@CqHtT#eJYMR}; z3D+yK68S#5iOL_+UKrR-QuJC^yHvcgQ}q!9KhU{#t6P^qqmSa9oUl8JH0mOEz|RjZ zJ6e3QWJ2L|Zcb;O&b3g$cK?3lOnGi207vxNEx;p?3sKg-=&J!|fy*PrB;onfBg+_Y zX4s#bX;NJD*xzR%TBxA=MG?;@M!=i8IT-iy!pA-W+?RmaZsUu_!>j~&Oa#L0+7~_c zpK1QHn`av0pF{hPYy!#y$gTgQg8tF8vW)*26oUWQO;ti^_y2GK{Qts+mNr#~)Vk}J z9+7Pvlpjz#bEEaoj#-k-lPyHO|Gz{0NJ#hb@?fg67bpO2_lt4Bfg z%S9QvPo!swqdvsotqYu_Mg`zQK2yy*7d%$hXsdIQTBuHEOB%*GW{YqDE%z= z?@}QMyD)L|0!SPM$KCmGK1^UFr#nj=WhCdZT`(TM;IqWhugv=wE#u-@;)o8T%6nnr z=njxLVrEykaA|`8=X3FmmCZ%z_}}S3;)wDgC(#8<$0&a0Un)vysa!A~Jj%1g(I@2> z7cE^M6X3OqkH;Ham;<~IY69n?JVSTq6x4&1%1WIW=mx)i)fY27+u;ERAP5SEA>4-iuDtTBZEq zgm(ED?i57bfANzWYmO}h)jxH^Ok*UUnm!Hyy5Lb`WGx|)_K8sy3MXSf`hv5+T_0?1 zY4Myd4LoO8`q;>arG`$X>hv9Ii(+~7Yj_lk+!UeGWlk0h)w?ORZ0zjpiX%_@m60TL z?2+)q<(GM2n9b%anP8H)$PppCuc!jOw_VzZKU?sEO1wZ;{pJNQwZ&W4R>X$DyK9NF zehE{B{2?9+4SSp1kXYS|%maZFFL|XLti_TPMo8?tU9iqVCUZX$zW-Q<$Akx4e;5O_ znX~zzzf$3{P3WmEG9uUJUNYJR0hghnx!~f`yQ>3EQ?OI#&4~&JHZ;tRspeDDxa&&N z(LB5;yrM$rbI3OI1OWNX;rJUOfNRh?7J5#feSiER%d|x+OS=F!e}pr5=H*qVh1jsm z%*<@BSQ0c}oAfLLa9(3@-_$666S?KVJYJwfvZK$GsMa0(4cc(>G=|rZFHyu^KwjT0 z>qUlO=6C(L?{dcnn#D%?P{l)l!f+m`T`<3kI$iHXmAMw&pcNYJEHJDJHFBMw2eo;- zyRD70v8ZauAYBeIQmoV6`~W%R4cj-?PEKW$?x_0ytwQ$U84~fZ#AKKDx#W6miF@)0 zC5V<2va|Yb2Px!Qy*E>g@KG0?EP-K#cVj&TEINg{MiJ#ELDDjlb^Juirl#qVdySW< zj+gI^+3;Xqmb{XyM%~+EPd;^QFPE<`>x$;)s9*b`hCep_wi%{bWTOqx2X>Ub{llzZ zYWm?0Kj`uDh|E;Q5XFMG7NVd~R)X3i1!!qKI^KOcR%xF|RcY`v|79Kz(78hH%6`tf zXxzBIrx+ayO-eI?IivaDkh|AQ#yURfFq$-Y&D2T1!_yH0%LFoiCNWx{>+nn#DA{Kw8BDTkOK`Z^SW-7WkS z+|UT>3xWilXxW`KZ%)FnlVs#QM1eSIyDd#JTHttka;)w>0jqy7kv87yD#(F%@VunVssh|E2umXL zb`UeOw%PHXY;JM^y+@C5It5s?It0=zO$rN)i}lN&PJ5$y;VpM85eak;fd93H?=Yock?MJQDpMTPVKk*32Xa!@g{O_v-j2eNik=jQcUSLpK-Jb_ytQl(-y#5!dlnRGY<^Gy(Br+zSpPd7 
zRF;%*Q)DKhIp|T!0Cn8=&<16QpRsQ{nlZCo@Dvd7(F6i^lbha`y=;C;iz8R_!#Q+zF8}H zH!jf;Olv*mlmMdI&OZ`xY;Xl{%(P^=NKT3u^D7GB=MMlZZV}_l zDX^8E+!d0NtnBQAeC|@W+rBC;YIixhs99jhVqNRqua#E>qJ%CA5a!>Ipwet}7~5cIFk9X8l`S+aPp_5~I0 zeoo1lpB#Wf-!xbq93*z6)Lw9=MDSlr_lU zE$M3`zfG37u2chd(v+lV@KtVdgJ`pdFnP9hYxmf2!7==)l~4tkX#CJ0XHLscqvPUu;pJ{09cJ}YGUOyL&I`NSY%L&&e0A87p{P;n9 zilMt<^JqoT$XBXz^YJaJ9Zp1ndcMqJN7H5sRdy~Xr**k=_jGT^Zn+BeQGbf2_!fUb zg)6{m-VCT|hIF)hM8{~EML~43%hnJsDPOw)yk;nrg&yLl$3 zE`6T6s}&Zut%pxgVJUL@QpNJ>&YM2dWi7Q=bBskUXx7kUEDZ8J&{2k%C%YNgSD)0_ zK0)6c@a#+RIJ$`%C=|jOA#)K`r57I!23MSZzQi$ zUVSKos>nPlF5{69M&V>9U6jf z9@V@7F2#k~jg>7Vm^E{|_8+H6IpVlD%9E1_D!p_K2g@8V@5*07LA>Rr$wvbkP+NNl z0&B@I2r}^B#LRx*tLc#}M{F#J(P~e$k`m4D!~;u?kt!F5%LN{!V;PVNoGyGY*Og(2 zakl*9u2_C_H02vKa90X(6fba?H#@87@JX8O%@%&xu9RQVBf+Ilr|1 z$yqsCe5?k+0&|-Z&oKaQKaT|n5%q9|V)2Tiw{G3?DmMojLUlkEw+S{>IXUxmG^)D6 z*F94bBQcwnWuBZQM95w1_>GOz0ddzx0wP3Z!2ww9#aV636o&ObLet2E&4c|X(Xde3<8Smw| z$nU8Jja893?L)dIOHo-g;$f3csbV{;yCu^mx-Q#C?k{Rwtgj25rs#fO{d>=opKLMV zPq#g*a!V^uZMVJ|w#{{xu*6pr)Ep*39ta`15GJHFc5aMjNOfIKt3U@mp+8)JsNUIV zOZAvvLIjdYah}wHD^V%cg&Oc#~c8#FUu4e{Hfl{C(G6uMzC|C!|kJ-;M0aVZyQSN*f zdTNr|OKcisy5n6AF4D{@L;cZxTE9@(SYzIySq(7G9wRoApP;bsGpHO!Ep^;<_H9qZ zDxr&1m|$Ec=091u%MNZ<-g^%)+k5=F?sv+6nN zdh$zBlyy_KNtUe>O$O(0tsD>6s6na=549JLg*tHpmFIG3j5Piwt9jY3HWoT^N9`PY zSyHeqm7wOK+!w|RJtpKO2Qb^AsDYK?(omo;Nd-Q1=j!BX_>pyhQBD6$!||axz*}kQ z+V^>Q-;F=*bbpJH$Ri^oegt$V!+qZ>HKY0Rg!>{ha1YtT=1W~&MCB2SaSt88)H`Kf z*%2b~UZYxp>6NDIV8x0OlZ77iH<(dY&o9u-um2H>fABipx!ya}(QkfGOQV!b3_1dG zj})?T`RKCR3qpwH#7AOVpYqPGCN`Wb81_MlvZvjoypbI}C!fbH-TA%MQG@W)>u`%Cfk0rx716D@9PEQus^7*^R zVjjoUg*OxmhhxMPT&x)&A7#EDza=HQz*5L99mwmw=g_)BH|di%byNoWECpJ1p-Z%H zp>UCB2#4W8Zkh6AQp`lBpoYx6>mw$nX|4>Q$)RrZGUkm&T?fkNPgQtR%T&z)i2@92 zVt&`*tpo+1kzNW!1otX*+HvmFA5;GJS11`#+*tK%Ta;+8Ei7MK%~2NXSKytp^OU$k zz&18md@N_L9yFQ_GXYsr)kC(sSHq)Gi(fwqyy-2yRO-Ds<$c)RBTDFFc}RPxec9%$ z=i<%V`13FfG0JNzX$GdI0bl6^4YE~Fp4Au)>I=QOd*50k7w;JRZ6DsfVaPNnZ9+*J zCyg=eqc@FbGgfw)o@B_fAU+jCctJmZ-dH2Z<#x4su(s3`_#_bIA8|#wwNARzE95Ro 
z`hIo*A#o=FpDzJ3TPJ%(BBrvNkYQUQ$Z5cA*t}gAJx?5Wt<|XjwGUtoBAE@nj@7@XQ<@sk9p~7{f z@yrj+w;6=iAI5J^jo_iUcRcIPrNU)j`Fkm<5_#R1bauy#^PW0P^FVf2zm?5*8aTTH z8JmTCftp5tQjN(Pi-KPXmp&k@v@2DqK7an)p)IDLT<5wX0?2W}>M&Z!LO1t0sKQED zUCY2A0kFdg5#55z`C&kIB&NVGL>&zGbdFe^s?q?=BGY_l%(lTGV-E6LA}%75tOFpT z$a>S4uEQ_xy;i5hVUsu}xP(t+uoHTE68V_DF%KKu4ffau0!LJmFAx-P6d3~AaBsfL zH!?K2){~BAcj2D3Bbe7_+-lq!GwvFbj0nl`FHg#zs@;uiVTveZs%-gMFYVqD!R8DW zL!Zka09E2o#QB7|Ec(|703bKseZ$JHJi@`3jNGbsf1QpC+WiSg6(sp+LHUyS8R3uX z=i>oA#+6ct7~jX+H$OSxB^wrb1~=CCzFgSayot~EFkNJ&&uZb_6z$ZZd<^#fyBl3JbffA32!HOt8Vc;|6pv8@*<1l|3=`Kla@+<}&!IO*4~xvrsYhm<6@y^_^Q@8_hnN-@pZzbgitBYfT?uQ^a_-33-54QK(G zj~tCViXf-tRv@Ab|M_8tjoo){2i@uZD^tn(U{2n;tm~+R^{-njyLkN$zfL2;Xuz(! z_c6<1B^|^9b#g~6g5Okn=RK%-^9e9AS$6z=w$wY^2G5^G-!~_m5D??FD`#QO&6et` zJgq`4eTyMy;C}b+A(ct#zQXCV|LS4z|9yQCyO?J=sFc4%k!=#B*sWua(H(ygmO?XEp`WZ9#7mrwbs9g2EXNRqCCkc87e=jU6OW3waJh@RE$%X@$_eIwmEQjR+;X(RNcSUO`l9RjSIR+wTbUDH(oZ=R8s zkd*#ebkT&nazcEzO1E#;vpX>YY{Wrk+W6AWdRK>B)++2JcJwK@-<)i|%zRrX>?8V0 zt>dpjjejIpnjms@Dwg?b=#J5St^AS|=OX)4$5cC1^aj=UH{YKOa2H=oy{T?f;OtC5 z{>cpwQ(b3`Ok92}jmezw+C8e~5h}Tcr0bKr{R03u%@otF@oMF!{;;xVBj0d+cM>&0~~i zJrOE}V9@=UMH(+<1-MN+3`(%&OnK?;tAhU(5dV~yRg4>TPM~{q0I8?ZXn=&pTaDeh zW3`~or(YOOqo8@^Obq>0@Y#BLk$UB~DiE&Lr+B}z(lrboo^{4lorZtNcFz6wS9xUg zBB*FHC}pOCw@tuDQWpnS9$&jkX2J9Ed9ll9Nt0`To}|XE0A92Io7?S<4ZAD5R%(Z& znMw)z-RzTEdRK<*_VzLgygou$){p+W%ZWgL^w;vnxZ}g)&-YdZ>IYq32Ry-1F*QZ0 z{i~9HP0Ih*d%cyz3Lp4%?dhvUsaJfd_kj@YYJ}iNADb$N|4IP8&+)52KsU{OrMKkJ z3_i{t3Cza-tV-g{9BLE{D8@OMtm)&{x&+8hymmsf|?4ZIcz3sU;Sn;*d=&{%2=R3`?e+}M~Cp<2E!&`j6_p>h+Ijh)Hx%cm0 znpL=+g`tZ2U(JMn`0Wzk)GA-N%(ttO>1Ll*pQ(P8`eyp$?wO8d3?sbN{lHsoXBQ*c zV4QN70q7Mnd=hKH_FrP}K{VHb#Xa0_K44Mz5Mk9gYtNi}%qSw_v`|0Cq0UF0qIE7{ zIkRfd0kvC4_SbFvA6wU+1r+^_0^W)LW!nF{F$6;%Wak3Nv#L_K3h>Bkf?1byq4U|4 zujOyh|8?sBV?`Ka0Y&X}y>;n-U-6gU$Y0J%{jYHR=<)fA^$U< zzrNPr2ta0ZvE9pIAxGf%Agp;06wbjy{BPg>Bvsm#vAYZ;_Btl3aX^dae3|_r=@^jp z`S7B_ON>*irb5VO@VP+}q*BPi>**Hr3qaog{T}}`Vp?^L*pS4^j&YJib98ZigxCF+ 
zgHDcp)`t&4w{Fj5d6fRt%vjFT+=wcRknwFulVK4(Q!|gX8HKk?R%POtt%Jx6O%(gX zl6o^11W{&YK;8`cm~e)Tk8k09=j==`;8(;1?e^-`+=k0wq?HC>tHaK*g5okld90M>q z$@M`or~Zw}CAKIyy*_F;lTThh(g^2F4_vS=l+t zHZ}>gs?w|!O$Fj1rDkTo1Re605=Nz%7|n5SUI{qDsP~O-Yzheo@*9$s+2+}pp>s$kRKcQpM?e9|5kx< zYD?9y)G3+Apus>>^%buuXNy<6;m)Cfe)R%_^<+uRphQu^QgtgsfU{Y9HP9`4f^TiT zw24~tUJwMj5p|yix&^6{&D_dvduzvoa>UU}hB<~~KPh{9k&OgvjMt2Cm3xgb7st}& zHZBQTb~VZ&_28LNT-ut&^$X9Ie8yTd(`1MH=DiPhy4r5D16fkFs42*f9$E&sC2Onq zChaghNESpqip%Rcwa@{fzHI*cd@^3cr@oUY0%Fe&iAHobh&BanZMZm zUqvX4Zn0X~nofq(80}=V7BFXjGknNqTF`iD|7Ax7NEi%$f{1%upuWB7@BfoEHqAhU zWE+RXWuofI46I;weMTwtM;?_;LMUc;>@*9Rl%&$#lbUw%t{e}9sQ3Io7H}UMD zgv6Aecho20r+$l)p2-rhNPt`+sQotmsTui82_K2#|6}hM2MGWU5DqQN9#P-ADeQ6v z1;2sbILyYTr*^@nDgMsg4mlVJ&B-^y1wq*X_K>1J-gxqGK=p9EkVQYAXb{YpRWD)4 zQ0+*6F=5E78DMkcw0p*+=V05nE0Bz=0`X&QydV>6>~8+fZ6W9j1vgiQB1tSdC?p)< za5JKR(5%!37u8Xh+K%expJ`XV*(*CfwJXU8+_kq^1HU52!j!w9&FS{128UA_Yb+AhSS;MO)=;pjGY9T*r*wQ9W#NI$3xKbT$ zQ=QdTiX_9a_ZV4K2*6?0)gEi&N)KuDtIg8u zOGil4Tzg;Eh*y5UspHup8Lhyn4pCC!gtG}HpyVw4_trB4S;fTkYuuJxCh>%kGgv_x zqn$;QJ3U6WVbVKMUm+`_Wb{Z}Ky|&#nBy>JD+n}{A06nq5?&k2_vD=-*2n30qa1pS z=q?ZA2V#l{@th&2&C&nSVd~kkHSt?u8+yXQRj_r5Lq3(xc$Rl?t}&eur*%PhlGp@$ zN1rXwu%C>(oA8Y+WhvHVLE@}8cLyBpZH1la$!iO+IiNBhS{5FjFD)%~0#AvjD14Z# zJULmlb{#RUf4oNJ$w(k@TS++evU4OUy(Hsp=>yrjd~qByr&}3!cnwq&lD$Kd2I{hm zysUSPdQ|ebzefBt1YB`;jHv54kAAsj(O~WiHG0U_i7KMgdq+z{n%VcLR8!}uLDMHm zP#0l4Cb|3JhewNj70M!dc<1NvX7@l?t)9~!MV7tk)j{gyzUTBlxp0a%gLPi}dN$HW z22mVO|E0|t1qfq~8=h9$q2kfe6J_=}`oCU<2<0OgD4oAx_PI0o-T+)R4-r|EMsHKf zlc!pN;ySB_^!8U*gU`2G=2xKXmhRWvZp!|CMA+|C`ltz@BASb#fqu(U2_@_icEpSe zZ}tVvTI>`q2@HOHuA-#c-8TSOi@Jy$oH z1X?F&&94rH!tZ`_&mujp7+{NaDaE&`fz}WGPkbhO{~9YtPW!F$POFms2-I-9wQzxw z|F6AQQv&8#+-CcqF=@k=XB5fKF8Q%ocHh?vOUGKvgJNo5?r^TsngK)-Ktpeit*8wd zOr_Q}3NBh$Mc)A6Nt{jy?Yku>59bcY1dHmB9Nsohb1>iDVSVB{tbSt7R zj#6g&kG1ei#3zn9V#=e3m;carODogtSxzVJkwQwKzsd$>a6#YAKCLc>Ug=K|vG6Z5 zYJP`@&vbviB@>zCXYhe2r#%*>8d)|kpi^vA>GIhH8t6At&Jd(YjWRge z?85&X78ctYGIiWggi~EWGO(y6%$-2~DEnXQSG%51=r_DJoG=cvz5RRfvsKHlx!B<% 
zWGp~HX0QcnBF(oya4%Pld(ct94Cgc8;cpNc?+gwOj+SozbRyHqqdn zsJc>stL;hJ57btNN;rILURugF#5n0!c;p?O9Hwek+F9Guet9G);GJxgx_ZSrzFG>yM_L;Gl%5Wcvg-R|Q~y;>6LBtCn7I|~LU5X@>~ zi>iDc3m5GyHm(Ke$($^IymhtB>g~@`5kq@A>=e#PHieL8RRqLU-ZO!xg9sPuQ0Q{?r7Xy>gWA zX|tq;XPz#!SR4K0$0KChtN(PD{d~^YLhwW9cw3`V0|a*vwtimqvCybq?ygmT0jB#3 zEK|LRt8A-kX!9_iL_el_85-i`774!P=%dIwZzP*T?ujdMpKI?+xi329k{l1oQ;H!} zO_iQFkOIhl<`JGXe#7nG+7?XLrVd5Q4bprKj~ue&;-wB1fxAjVA{=~T3jqolA-m@q z>N54kQ9{Xt_*o1-ac0MBxors|_I8AqDC4SsZ^`d}LFoXojrRPvpfE?SB>5oH&%=t4 zt7O4O&SHpD&nqduUtXvUMZe9%1%jV4Yh|4ANCiF6vZP{guM@V(7OQVOUmlh ziU=|t-vM30s}qO%ZWU)W{IW)svwFc-*I**$bYOqY=h(GWSi5&w*tdfocj#5UY0+~( zma9L{>`=7`BS4xcR^bEOKnkCVM-`tGg8b6(E62wh3?)DFMl2A=Gg!R>_fP< zC0B;b$)3E(X(FpaH>9|DiPJM7o~6d$Q-;-2kmm(TQRZkp(r$bUx;ok*|0SMKNL?%| zK;Lf`KhJ7fML!F(aKyHN`&3i-&?Mmu{Nmb^;O>RT~bM5Jj;)FaTT46pGO;G_D8l~t^-5L~a0dj#a1H90ln zyV(zLF^MDJw#>&W$&?eJQ46v7sIBR}u(>=AhDghWt8hngbDf{yjjR`&W10NkWcr60 zPs6^Int@^jsfXP|eK(g_t@wl`oIkc@R#;odf(eI66Cdtskoc%KBufrSPl1iggv@rM zUqBN*_vjxG8@X(W7fYnL4qlo~_Fmhrsj6Sjb-S#?t#+UiS^PxeV8{#=g--M;%D7^V z0^3&5j{3?vV8?WYUM5CAY*+jI2~q|41Y(RbKon zq4Ae~$g&a>4lg}~?m7Tta@_>))U|NuW(V6#?d)l>lMA$oDT=yak2A!i zz>}<^({Dp4O2iW^>H#W#4T1edY{dsPsQ9rwHWL|kOrIPPv+M~Dt*UZFIV^Da7@a%- zoR-XBs=9-5J?B-Q>cwTmi8?Df{x?IK%Oes~Vh)arL5D9gK#@h3uor6f;}rs@ zNa^_s-WWdPHAzK#+*q4vZZ6z3a!P=J&cH7^;WYOLzzHLU7r zohA!)8ULH0mw{3uMnl)do+rjHVAFV0eRbh-A~&4O7R=VYX~?2 zzs+9Apv+Gpk4}$bS}WmqxZRv|qK!8uoNT9syc-N)8j%Mi)Cbb}E~}F**YmyiTdX#Z z*1tvQG5*lH?&+2tw>O3^gCHF|XEuTaNi+|&e{#sfPXSyS)otLC2-?0z(WN-q6SG9S zG5m>cGCVb5TkWcHBi|xd7mq)~_7g_@61DgMRFn&y_`2x8f&)`;fp%T(MId&-LvXOv z`tQ<~8|qL<0>vOFz{}&Bva{1an}R2N+?3YTVcCOl>I9z#UKcnyzK-xo3pu$=Ux{x6 zKCbRT#<0^*VT9LRPA`XbPS+nE@p-R*=cvQCp}tnP6;aIoX}D2r_nYFC_;=X0m{b|4 z#YY8;FtG<)K9!2iAGeciUF8EUf!7!h%C0Pq%{~uwfOi`%x+Y^0^~VfOK8uU6gF-fy zgR2bXS)k^-}Nmt78>aFbONj6MCNio-k^n9)KANm_BLEy@N8_Ik8%}{KHDN2O*O(!jVclL-`!XgN|pCpRdJcOXypr zP7SW?jxdCV(mJU>s@ce)`T%&4LIq|)l?^#_V<$4Fb`ih@z+wY_3Es0-sWg@Wkce0kAz0Al{gIYer@(6lT>>{B$Ct 
zjV}Jm;Hooad>S@mVZWj(*{a{K%X?R5#;bBe($aCo>PG{6T5f%xS7ZYvKH;Dwav<#; z9D=1plG3HL;lL0aJTb}81>=@ij;Vf;sr15nVok5@3$I~>B-vF9LrSIU%`a<+`U-pc z2mU%F4QP=iVQ`U^ET)wP8VTIin=p~id~h`Ssd(z0j(X6hHP_3M-|u3=k1Ytpo)*G_ z?Mk$Y5`@i1dz*&s6O#wV!S?-o-2f5JpyOu3l`Smy6#vS?ZY-a%hn#P(wfKx+bpGONgoFrS!u3lC|2?7X<|{ z>8_mH@Nj_qaJb>gU_a@iH92OC>2f{T>Ws&E^Qa`Ykl`0UJl$8JrmaRt*L-N}S|-^&hLR}!|hR|Gg^P%*ww;)38Jgf2UlMts{tZg zLuA@j_`chht(B1p@vzf>C(2|!h>oh9HA3mVnj)gp<=fQ@^gG1iI@@0WiX~ex*IZ;L zMVFYxQG-qWQH@JS&jH*mKq1s(fA{gT{H`t=y=e$?f*`t2H65stN` ztxbR80A7Wn;y3Y$f5@t`>-s}Ck(fVxc_=?#=En&)G~ticI7B&e$H02JE`DEpVtn^F z^#qicrWG;tlqR13Aege{2Ogj=PwX&Ky4>>6vLm&%Ausyjn$gR-~vV`g^ z#GT~A6J5c4#nbiT?cuGDR67&t3eISk0?|Ka8lSJgR$X*cK~BT^R?h%nt9i>8lp23A zWwPJ*9ZAXNqd(F)ENU(_i9WlNSc*a5I$y|q_fry!wop0&+hMZo*(S>2qHX8Fyf7 zBh<`9qqNy%9xvhEaKm%QbERY&lD>b#J*VxZy!hl^UX%Q4R}oWttULTyo#S$cgJ8uD z4hfF{z;_j)P=Qoo6!mV(mcXfcKypMxXvj4#lZMCHRZzW$vxmhg0GdoEPZMU4jb0e? zuf?v}tHFgVR}F`GD=1&XAq?SvvU6TNe*8Gp0n%GB&CkEjAV znjFcX^@D>ixn=)EFZGAU6m^JpVl_8Bb-Je(82Y1i2|~Lkd^!6Wk>&DN#PApoUoR(v z+Nclm>-zUuho> zHvC%Iu8Tn%_)8(h!O6c%fynVSD23W+1)|G8Kp{GNHGB;3cXrhC^o+O zI#WCc|35W=pSr&ro8-lO_81nHuYPA?FVPvR$+nr_1dS9M9akL*Iacd}bR?>)-F4>Q z<4S29YIwWauud;HaF9(4{glWlIpfjB1Fn~&D}zAV2!dG&Rj=J_rE!$lmvX3n;qVZL z8_#>wH!#;i8I&Lvz2=T@8>#BYT%MKy%7L4JEKW)=zy+UMO19GGyegM@YhDE+zS-?6 z^4-d;6#GyQZ5PaDs6rY&);#CpAV%_ZZ0Q4KN@Tgz)6FE?rwMfy_&K^D*W}yx z5~a;_LNDKQjJGzE3^WDOKCPoxN3Y?}qMup*Jb0@4lYk%Amd;N15rXpbL0COB&f8hF zR4cMtwOgpGJw5q~xC&m=XMd|#Z=lVQo9!u%ssx}Reex-;1C7DPqL|G77S+$r_gijY zkKY6gN*7hW6mdQtQ-&BwZI z4jwx%xwUIlv)#G$^P`-3tZ{IJTZyPc-G)Jz{oeY`;UeKj!)>ZkTm7HV{U77H1{(yv?zuyfXVvK1?g#9fRv&634Du;t&X>U?b1$Rr|T1SWqSY`Z_gEzm93!G0}`55 zyOkm58I@yORUm=uNjE}AGDzoG26?}<4WyT~vZZ=Y+_xzJc>Ilmk!gP1T|Hzzay7A{ z%0wOC#-fM+g+I7|%?fVkQO`koVP_4?7hBN#*+2W^fDehb0 z6OLFji=~n$B$n3v{HRRd#)cV!zyP=S@d0HSn!W4-XwBmW(O}NSfJ+IM-EkO#As15N zq9Ca{9B@(I4pDy5!)6*OG}-X}XAPZ%!!Aid#VMHjx?*aEdh&R-;z%1FgZ07Jcy*sx zzT~8|$Vh9EE!@u@QPiu2uaK$E6nkmpT&8X~f>X+Q*e)lxptx@GdahW!@Jdl^l84OeRw~U3+k1T;mc?kqHaCI;%~YTcTxTmsG#BOU>YJ 
z_wb`UfTW*wHzgMkx|UsU7<8j!kuz7WOk!=38(MetINN#Pco(HEh~1U4eAr-yLel?V z?0xq?oLd)f5JUt?Bt%a_L>EShPDDf*qIV;@86{Dp3_(Py-e&YRy3s~wNYm@+qD8M0 zy>lPuzUQ30Ip_Wb_x;6ZF!Rjbd#$zi+H0-vcX=&HFFN-WKP`}Zu~k$Zbei?E$m^)q zQ-3ueZ=z`M2*cME0ELCPAM@&+=+hq6=6v+dToU#2J}FCx$u+6lM%x0+=G4b?hd_8$ zJa&odWxM7b+$LA4c9rHG?kAblAztiV{kAC8L_R>rxKBYcjL2kM`_b&e5ag01x}~H1 zMB$s70WWJlwsrwxvyxf_8!V)%TzU3YIcwyr)xH-BaarieyUm3INJ)lYw;H-Qm~Oe8 zuSoVkNgMx#t9vru9t3w4_DP-{5v(oTKe!MbM#6c0@fxv){R++32gSwLpJ-@5`09B2 zjvzpjE9GTW@14h-V@yp1r@IJ?nBn2!n!1{?Sx5ZthJpK!L=WqFP{sNR!^nJrM3rG* zl^!S=d_IOpO5^=QEVdo#me&7z^BR!3U$j{WASRLt9uSc$_+F(|?eN?&Ief9)`iTfS zBRR55Ah}~T1$mtB={7-&G)pjHRf1)@VUMjL0@(PQK%w3_{m~B`bRDj*7fru0zmSW+ z95OjET~agZ%~8x}%sXn-!#+SC_4pZh>XV(`jo^<+vn77F;0~<=R}4<15pLd?WrT z4~4I1-~)jRYE@Ka6%UgJCUZ>+ozkbN+hX=BBn>Ov6Bj;nU+2?q_DG5s&D!!`i{{sC zYZXWOI8G#K%6)@DfHu$f%fC2>=cG(yCd}G|ZlL5E<{=9RrC?f)r zVFeQ}E%s<&+w{IKcE|SosVM0D=1?)VnQ`;SvX8%d*Olv!t+Iijm1L4aG`F5Evyl7^ zs(bQuu-=AQa=0ea6uSACNdT3eqg#l*ZN(;bh?Q6^i$Yy%Y|YV}eMnL4z&?VHc2=51 z%bM>0_$=acEP(!D@FykD@WT|KrmAFcJyW9Kvtxf*J$gS2#covT41Uj!6`{p9zNsMYyjFUFi;b>o|WTAQI)Q*%rBQP_owUy_9p3 zoW6Y+Na7=eaT@$at6j<^7B^z&4*Iq}r|2SK(w$if0dshJvL&OjCr~=41FzC}gQeYs zF6qNwuLWO>`uq;l2+-_Z;-FJ87N$Tr?{S-{LzBhs&e+Wc?x8=q073^C$!Wq$DkjRu zuxWBt&Tn^eU8P`FZ=O&4h6`9U9THlm()=_-ALeU-SC8n!7+h=7P?Ldj2bfreypi45 z2=-3T&U8V=%Ibr<;EK$veI;!dcU|^3?h^KNHXyuZ00ZCQ<YhK_uo*Ab(87gpWLzuBsRs!yY&M`FvkF3%W9m;AcHNDKdsPe!nxA%*p@f(X}ZP z_i~ewYMs`Ppk;MD#QW28bCC6k-SzZi=vU?56x>JAF}v4WYZeqYCVe)$UHVY*?uv)u z;`5Hy*&Cw4wbmbREPW4lUqLfDk3YjP0l3O!Bvr&B8iN@WFt-)J70O_uCT}ba8|d7} zIOzYyvX#Lemein|hoX=qg1YYu=OuD=#P^lj;C$4&>Tx_g@z&jrs+!(I;KejIuS3d} zuI|F`!h?d^kLej^!I_|~2SG>iNl-r{_udmMv_Jo87t)_h6Pjhm%R$jMlp}vAvSZno zgz4|Rn_P<@j77T415mB7K~n~fF?_9_i2cxvk=s3L@VQmrN_dc`DxJXlRo?gh$B|>1li#`9k(=@y69B%zZYq2h$(p* zc<^IolWT^s#P(ZO@}kYhk(}Tx{^~)92c|DiuSuI!y8pwH)+c)6tysui1a05^Z7&iV zgof)@Dt3)WzfzxuN8gfy$bPpmB|VBAA&Rf@`W{m_%`v;+>byF#J>W^qZT2ypqFT=n z+E{KO_Ggj7W!032>uDvP%bjL<@HU1)^c2Is7A+s7N|zlBrXwf;Z@xC)E!M5^x+7*r 
zA`>U&hj1D-qpo(+9JtD%la`Sm7Cj)hI7uXG><4~Ksy}*(nP#!fVOf{c=vxhF_td!K z@ZB}O6_5LgU>D&UMCO264bYX9*(Av< z0N$AaW*rO65gV+@!jv=;)g38P_B*&%SwQ_2_H3OElFr#z=RlNt}B4ykSY(~0$q zY~ts>a34d;9-%svo06hKgHjB`^3wU;)n#ysre2G}`_@BQNr?@l!$SRE58Ar((5Md$ z&vD>E6jD^8pbx@$A|$T_>y@UIK6#OSjhlXNs%?zQkgZDt5@&>1$~~wA`V`L$l3`mS ztvO)egV$l>Dag`}eXdOyf`mLU^EygKS|XZK-p|q9K=MD#bh4e-?_(Yq^k@KF5hJ8( zfOqXsN{4k;;e=%1_8dHl_WiNRxo0jf0x3~8~Z2AK=X=d=GXZ=e@@TDUE2*MA)i1X{%Db0i!_mnPc#>c*1>4>c7Z zVH6b;W&BdNKuoivjhJM=eH~`6Y@2uQS%2nUzr8d-0m1t2tiM{W%HDSzoi03lsEFy` z(#9)V{xrhx|2SXcz9vL4NDVG>0-9SBY)3=y1Wv|~3T z3MC#P%So(1+F-~FCGsBM&v)osC7|01e1L&wN$7$8qKCJ9oMLRA^YYxQniLCEYh%D2 z&O|?cqwmGxyYFl->dSQL{4^iCnAWEA49ti?7J z2&9jnY~k+H9dweW@dkWXB6cp-cL;e+H9lp&Yy@qv`Z_7#*I^1PPY=D2rtR)oZ=juZ0n zP8U|S4Ebos_o%T<^vwfGiUZqw%B0p8GFjWrf-_j1N0bz=tt+SdGQzeoa~M9xHb=ZK z+IGSYd6k;FfIX$tdHbsefKwT`Err*-qjFy_OU$HA+isu-=6dct{PXb)?i8`KaCiJ> z**RgKovK(LD-$mzy}aQ(Cj#m}GtIIj#2d&ERGwZzoV5c6JI6~6&xk>F8s9v)aA&cB zWNqYOs=Z>;2l6N$w;wLKB)7`SLu=e|!`5YYpuXj+T_%QLUp<*a*c=|L78cl8}~ad#icM+!fqO3$)$FNw00Z`lZ3N(`f!}Qrmj}? 
zwfm~zWbfmR4un*flc^#(2Io2FQ_s9pAe`6KeHuNOIY_jtd<&G8nMf1g$PTomGw>eB zCEhj1b|d9Y-CMq*+N0jx963!Gxa7T;O3DVQuhsSRI^xIrq@}$n=gBoluf`NAM^h7s znH2T)3A$3Qpm-wyl*!(6pm8JwvL@5OrE)4!2ekqjS~7s5Bg#A%A*D8>rE@3ww!(M& ziSKmm76XMZ2(usffdf$z$`~Mk^L7AL66rXBp5U&RtUj6!9T&b%t9Jg?N^Q7r;fvOs zjh?%ZmJXn&#wv6JPp!bxnL&WzuRj?7zRS7O>1JS8U7fSHzTSjyj^f0D9cm=4wZ}`- zr1uncH=5gvFKeqPpq(>uGgUU;ZRw#>ya(hhNWar)+9Yqm{;3Ep&d#M|@ZjATUYMyY zIVwgNx;%oUEyf3jmOaX-+O1b?Z{{gzFTNhunJA80zOhAYL~0TzIZMU;Y-eb?OFz9v zTa@43@V&!8{_sXe#b=z3wHFBDZlJeJdueQ~B`YZwxbPd(i{$e=tLCF}4%&iEBmlr_ zCwMnl-a$=ZGTPS}$LnF}|M*z=kzz}a=yAX|`$#(fMJ;OEQokvvBb0UAeR+FPielq0 z{!}?)`av{jn_hl2E8PuOPTSO+{N_lBiiF{Y8afD9d@)xfp&OW`tFv#qFp0DAjb>)P zZ=P$SNWa(gwq8EF9sLWVon7ZV63rTShkWU#GTosvJCINw|CsL~Z@uh$r>C#jeFdV) zP7Yup+Gt&{d;Db5m^!jfVwJR$PO><5xm3@A)1)xD2886R+cKwS=AU(qEFCEmS_A6_<=M`YX zL&Y*}04O2D69CHR>kHsWw;n)-cOHq3U-r_IGpRAX*#_scGZWOXjcrNeDTo5dTg*K5N{|h>AN!4|nNCalg+XLcjvW&#}z&Imr=+%7i=t z2ZnwQ4viq8=nVS^K(2OEbbl0SHMxwAMhM(3b^xlF z_Ol?HeKfrB$x%S!rh8R9fm?G&H)xoTboLfx&(6Dx&mTizXw;Z^Uu zi0KNS-y#DP?{i*(XZXV4E}iGveyl3PevWg$uxMMPdYp_a?uxG$Zkfn5Y7}3or;E&O z@R%5D`=BY-fFFFj6+;_xlqjAEBknKbS*7k+qg|~HS;ZZb#|(@dz;+n9WksLL&OAYDh z?=8E}&eo5By?jRQMoR-+1`dP*3I*~5y4Q$EYT2Z`wzd)jM6|Y)X+7$m@PYg@Z8J>k zybM{NLhpUe8#cS6841zSD6ksLQ2JKWRkhVyDCo6vAB)ORd;jet zy)yUa7=C+@M2z+jmF%-5P&FOm?L&}s9ku;V`nyuRxwJK*!kX*JK9l9(nhAjDG8*8* z?5}4vIKo^rDPK?K^Um?L!A|^1*95L#DBpwb+3j~4`tFDf8jpbst5ES$FsTV|Cn_xR z#Yloai+eA9xUg!62nEnXXMoYCUt6-f6Td1ndg(_BmB&(dYJn^0I!i*%v#AHxhqRf& zbi)0GO4(q^?I|^JL3?}?k;B-2uLK(WJrp@?{PCB4A9mRs-00zRMT<%PKf z-^IZrODbg7R^WR*Vg~^Bt@#j75xy~dwc}?q!|a;37eYgjIcisCuP+ylT5V30CnA;L zDi}W~w9CqOSfl0NH(IM*CKxWYaVlETFfg4cdw#kS>Cy4tQW$$Y+`(~7gAzzjM*&O< zhbX4@tt$NWm!GX5vy3XZEf+=u zkD$(C6_~6pw|?8DzD1vP=aS8=?fm5J3E|_%c14;*uE2RvOWRKMBj{p+Eja3KEH>IV zcJJqBkNjy5N0|Dumz3yOlU-Kg0)3Myx@)Ykwmr94cWisCtuwvDXu{j$4LZ`#+moYf zBzViEOPaG{+)Jxre{n}V%?97ZA`mT69X}a*S_O~`d~CUCd@k~?-)qy?K&2z7I zDi-l`V=r*qJ2`Yh=&BbazK7!szX9PQEw171li^2>_93n|~`*9U@@qE0B{i!)Taj-PbLhk-s_tw=Gr 
z`A}=d&Htp_-_$qMACt}o3q&d;vxdK;;p8xpz(gH29a_HoBE2H`d3)G*vGt>FxQ;rn z1uYY(YoxeFB$daY#4;EDZgsp)5pbJ%yupx9#2d7ind+o(r4A{%b<5RmZ92dG@wEU? zd?5dVL#al)YPs+bZXo>ZMxh4THg}OPUP*-YX^a5Vaqz)(HT<5EQZQKbo{?qSK*7L5 zC=jE&iZ%;RoLDx%QZMZ&DJf}|J-)(eQK;k9_LEC5c6cJF96#i^eX3jKoEg@ofxkuZ zeMu}vq~l{C@dY;-QzfjHqX5ySgYd3>fZa&$Xx7~uxzRdhMob0$S#Yr!POWd+Z#b@8 zzOXgo@hEn6-20w6I@69xD>Yz0({`%+guR!GyQToLj<;S8gXt=@I|i?L)SmA@y8j>< zA-2hn0D6P+0Mr&2zpo4xfBwK@03o4LmdHkV_t5qLhlBbS7?r!&{(NE7=^@(^^8sBr z=q|wc*hZ#@aQUM~x|x#@U>u4GpGLJ#eQtRwWkr$Uw%Yf}r-$m4D#Dah`ALULkX5Mk zNtr4_vOl2P&{at0A0K(Q31NY>G!PqeL~_xqoHleXb474w`+*$%Z;_9jCVFCR<^LvpxU{Ndm?S zt0P_-s+uh3y9tgkmOq{%EV04+XEW=Ee!IPff3YEb4qYLW9eD+e;|rQbFdxY``XAOexUG>EboMxUQ|pB(`911J{iw>ZxDJ>_{Uq^Qmb#UxWA;qt*-$oLR`1mndHpFB)@O%FF$|zZ*)S* zo0lQFp6=-*Y3YH5Mpy;ieABx?3%ck)>q(9R|L5&TIHbb?hvOrv0%t>FvCIc_IJ)~f zxRT~O?LEO7KpAO|#(lR2&}t>Nny!1bc5_4}n}}h?R#Huxhwwl-JCNjBGcEriC7aSl zIO_mw=|qnUVsPj_=tYurnY*En&w`B4tJyCcY-vCi@OCSM*A%@z!^v@Ms*|kRUm8Mi zUh;eyZdr>v2E)vpXYfik=9=wLQT;;TtA-Yu?i9TTKK_yL8!IyLXPVAV`|k6@n~a2@ zEcHu@0#U!W43Z6@O;N`%`jMae1(kD#1N!6FFjFATzj&Qh=FdO;@+RPu0DfiKiQwGd zYxno7UTy#_T4z4R>OXUdq9 z(wR373ue=>BsYoIeQr^FAPFw{+sFVcr!IAoOjpxas}q<~p)sdSUg|9OPhw$4KC(BCYce;2q_4(+c+tt0{UrA)2UI898Pw|;HueM67 zo|Wy+o#5Fajz3zZ-4rAYrerC<7(y<8XSSK~=|ZM;SBhbMlxX{mB>>!oK%IX9r-xzA z12fwX^{ek{@A*0lw(D@d$H@u$TEu&%2C}ct5SV1Svt6;GpS<7ste;KKuhy&5^bM=x zf*$Wi1)29R;<;H%cpx@@VtzejF8S+4+9JwlrzdPm;;IgqpQgOZXISD-tk3R?82}*K z{q=PHo`5ONEw-2l-azP*PPUo!dxxgNoHxY!#kQz z{&mldW(_g+`0_R9LUZ~vT-wd|7wo^6w0R}+nN@F>)a-VV~rC4)w< zOLM3#Q@*S>qs9>Cxcu@I*ll071!Ww|q$d>Ot zrN%M4FEQ{I^B^`xxSjQa*{52Ka%QZMdXn+R*Ok?c;RT&017{^ikn@u@$zgHb8*?hE z&wtV51RsA3#n9F(Fw4#;mQQHBu>rFr_VEx^zR&P^CDRI%9Tw#XsYA-v#y}+ipZcKP z$Ddr{2Jx9zT)B4t#m^%9k#enw(A{A4F(ig7TvLuKpys1tPgOfwVYuFFa42YWW>G!n zac}B0Qv~vC1#?vgYiteyGtqR}qtzHQ#a{+vX)24gW&~lVh2q>-7On`GBpPs{ z=@v`z9hlkgBXdy)|{PPNxK!n!1zl#GpAfqVP4My@ll z`MX|ZV-dSeN{MW|K*?M}k}j5DZ+Z$AOr_&QJb9>0!ynaowbXql?m{l-`axpwOv}?Y z%W|@!|AxLUlL_S-X&x9Gn6%z~W%Dectn(Sx&bqoqNKD-FK%eywgH2%cC*1#Wr&wE~ 
z#U{yu_p9oYj{18zy_E==lVQ$H-PIThqusWUL?!3@Z$&jmgKd7*sauzmMMK%xW}-JT zFSgP}zf<0Xk~ypOoA(U4HAt5u<;Ztf?z6R^zI`vYsG500xALKWfG|$~Y*(~2lr_5e z4N+LF`dO}pHdgN1=FX#)O3XB`;-_sRdQ(El{y>`?fhtpJ2eHlwI$Pz1VX5H(%P7!zotFndao9m>@xF78ZVa z66`I2K4seqf_)mewRT+QG@kSV2*KaEl^Q1%zqHw#>DM;+xjtIVWv&&-3>VbeB*SBb z?DZi_{dwHHNYO~+Dre%T_2%?CtIavyfsOk(cCVi)gPF1fr<9~rjQ4Mb?)oabuYI2# zt@qh??nUD_>pgd0l^X9>pHaQ^=jvoQ&$jla!fRf-0B3TrWcTuw3G4pcL_Q$r)F+Bh zh})irY|O>W!{3X$iK|zecvcCYuHQ{=$xy-`4oGcH9&1)OGEMjtz3mGbgT=?1uCTs+ zYjv!L7bFL%yt4fSA!1J3S7YJ$>=!@R$TITrr?)vuVYr5YL=3eeF z*ZX31@%sCb9nC4pzFYOGiQ>aZnazCm-s!c&UY!^bqK-vQMF7FdD0Z8Olp>q}O>wD` zO5_6_cY23BF0gJb_N*47$|XyC#i-8fd~XpsySTa5BAOgdLB%>d$Eu zISWs2F92y%j|nQO=Cme97Qa|>@wR33b}YcRbub$*XqB74zbi9Qa(uXZNZjvgHl)Z5 ze<1W`L6!Bj-*f^+oO>)o*wkLYgP@BoMI=*c-@h`Kag(&DY%6*P{ zymg=T@Kb|+SJMpno2>o?R(|}^!CqtBn{y~_uHLcX^$^#Uk*$K5;+LjgJK_DCb3=2< zilNNWMou+2P|94rEs?3W{F>k7)7>9M?Nlr$fi_~SiXms!BLLPi+4x0|6_8Yy2t7WU zR!^TzD~xUPJ1xM_25lVy1wVv@P&CB?%M%y=j8Bq+i(*?|KdX?x@5b6yT5$ zaHq7Eax~W+m-)_33Ez&xFOgl%S;;<=72qNS8TDPI319yGg0<)KtMy1TU+&(xs(lng zKK(PG)aIJoM72Zw1Wg^YkG5gCZTOw|ho%6XHo%DCi=B4syvD2f1=&wqayRLx)3?0A zbcgr>)N5mhJi3Lr;%Rt1RCYZNjSZK+C8V_N@a6n3_oP1rOm}T>V^e)5`UDO)f^=u* z9c`t1D6l}-*)^{>7t3KdwN8Hq+6|Pw^#QH;)ms6!sajaK_NZqvCXrq309;1*v@M1= zKe)~~S=e^go;~N-L+Y?SvXh4ZCvz;sZiaAa9C|)I`+6De##e$8Hrv6XU+?qcU40cj z9U8bb%WRdy6k$K?y62`8nRjOo-99}WF0(7Weq)PwD^9=IJoEeTv?*42brcib#N46p zP)E9k)dL9D?4k!xXMJ@cHiKi&O3^+P_jrxV9wS9zOGYs|?99Fk1QW$s?y4iAiJ%L4 zo-+G;a62zG>IrKg@26nGzu6H=za80s)Sriy0OtAou6-e}K?6q|!@eq0aGFQdv5*){ zmTT23+(gnZR5_zMkLa8pTO77Yig-HVGjC)pd8<*BDda7_eiKi=8XoMebgE~%dHwi! 
zmsK(l`dRVA)@%=1!PDI}38#IG|G-W+kmKoqn(wt_B_D2t9<1N3S{_bsd!blCtPBZS4N20?}hbh9{xJ2Dme9uz_H2U(*}88cz|8QJWgHa zXa7s`A&x+DdEUZPUh6?NFYSqNlt{>zz z>K^jqNAXsOd+VZDD{*D3S>@=X7%TTj5=*D>jcZu*PP?l|j7NfxJgpZb@T4D*P?k@K zj|>M$0$Krh9!9VuAaOoZ_~Ld?$_ZUB3d{{VHn2&g6FLWtZ3F#odKWC={)`OT5~D&L z-bI7Q)>~boM}-k2gRV~q;cpg0S0uGuz_2j8du+x^>5XfsWcN*+T4LuWd9EGx6PqUv z%L4_z!KuP6LMhZCmWie5DL6GLg-*)Po_D#v^mHV{k0#Vw%X$S>7>3@1lPy=^B#-# zXgqXlcqI*oT)L2L*C~Oo9D{bPVs&=nxq)I2)f4!GT=;;)P-V}>8B@3I?X=^;e<;1| zP49~{3jmy^WT3#Pbx0sx_1=tL9<^ryVlKMOE_>%WO~@8-;2giRvqxTW6l$bdl>va_ z1wR}wE;8QHKD_P1?r_m-;YoyxhJF^rYhMQ-F;LQPbEOfnojlhwX{`$$aox-ZF?dV2 z#`4oX8xlWQ0y1-VxMTQf9K^j39V*^XZcVCr@sGjP2c(>K*9Aj=jfMWP@178fmG5WC z1nT=!QTW;_Zk#p4=&|_L`ksGh7uLGFjHWJ_eCIv7)eZ|j`mE~m*rs>n*};VvA*_gS z-;A*4j10H#o1MhD=q+Q{x{zwb^4?Bo$mZB+tw;*?665_LG`k5DQ1{!5ZW4VW(12CL+GQf_tDY zsR>2C+6B!;=czl+{+>QRr<&Gy>i0>#x1${OO!~6t4Ky`H3VPk1MlbtMBg80A;DD+rntm`&-`3n`R-xkC72Gbn5 zJfP#rwp)q^mk}btt|$O8z-{Q= zr3Ub#US756L?rp8CPSD_vgGo1$4|QvU@o298BvxF1W@YlyW1BF@x5Mj_`S*{KKvp1 zKCjG+pD7cP|M25(hDn`ww%qIjI4$hP^K(>>Es%IFJBPx&!fH*!GF$eR;K*5^AZ8{~ z9s9Fj@GVf9e%O)aG52%ov)~HDDJnS`evn!Q9lo8yv8o6@d|ryvgTO65>gIwO0msOJ zqA(@!#*h*z;SCA*+A;^+h#`O~ypP?)1sSR=wSW7#0@j54&PttfuS_~;L%l zIHTYbL(Jl2?ED}ZP#$Kgv@86@o*Ig^pudl^P7&Y8Ni3sVYC9%|)W({nG? zbfYy7z5i|jH?OehEO!@Dv=$TZbafT0)-|YRxU9y~7(w|Wg`mRBZZc)EL(gqXk178X z_ZS?1Z2BJF=DY7%W}E!{Be zb$C=FS@;=fE{i2YLmZ~yDNNMQ9*fKBa-~ULTO4()YjJ8Irm%%}@JZZZyB6t$-G?;n zk1mgOL3~e?kVMRt6=b`$B#(q`7lnJX2J2n5d-NclJ+SL-f1Wmf1p#g{T$mXGHg$$o zdHdEGGG~n4&Z2NGimwKs^0z!-!P(zz6`p7;Gz z5P2I=dbQGqrUnoPC&zaW@^MIKz}bu0@|-fzdSBooD+8my*>^H5R5UcwcbQyId3k8; z@NMY(U0$xFu$$DrOtAi+d8Vtb6VCIm2B6+;=O=t`tQYj#_f>`bL*dfyTxRk6acEz* zLkBx-acj@em&o~$)YX|-V|o0V0`)c{&f0VKAxzaOtIVd0dAQc|J)@LYAES4`f^D4V z#!QP^R@hSUQZ_)`xe}JEll!>-=5czap=^{Ml*J3^8xiAxVmDO+?L9CzH|RpuYe@6B z|Aqeezj27mtb{PpwR-|DBdG^k5+!1_Re6|MBj_Y30-$s*7To_Z5ZTF$i}vH?ij>Ua zKAIu7{OVtDYLzrA4SVPg+l4ruAawW$n|{xR0taD+-c2W;kumYiiuNa)$?fo9uMgp! 
zicffZKSllXoV@T*Hsx8P&lL{g)!UfS{7*q@o%J~B>V;V#f-VKy`6naxg;(I4e0&vb z*LZJ~Z#>}TNVUE7woU#qng0{~jg4RPg8w;^4OCNXr;e_6pyI00xBE0p*9b|tn$7(# zF;f1+U%MH`Ou%&NGPp(rbT0AVvX5D-RUoGgGglygV|T112&7yqDgL3f=lpM6z8@9E zbGUQ2Wom|XE}Ta(6k5qU&FQgbe3gaPhS@XYNn>pUKfZz5&tk! z;4CuNekt8=zV*r3@b}C4nE=D7fi)2`QvSabVE=V=U4Ekhq?rZ0o{I7QQ;O&On}K4$ z_X)gre$4>>>9Kst2Ez0=>WUK17X zn!y;*zcFaP2?M5bp;43P-)6FMCjK#48{%KZ{}`CcD>KeN=w|=c;Xhv8bUzdS8fLcr zU&YS`Oyxn*wKMMA->v6z4X_60yCcrl|M(ZM{8!F6(f=pT|3Ad(UpXWt+{u$i#-m?q z;%ii9s%t-VXA~%Jc=gFM^Fhm}-V84~JCiIQT?ylmvzmp!E&GpG7r4%DCQ2=ev&HIJ z#0pCIcQ8Y}SWsS!8s1ur{j@%?Wuq?^WZ6D*J!xLD!ci;$nk0Bzrptue!WzA9G>TE0 zjoVz>p3=s;^dU@57#@F(2qKILuD+Qy6tpBK{oh~b|MTKZbIM3~dQ$+&3&BsLDaxDy z(3}P-C}A8{&FH#N6QaMB#3UA{$752#I%&#ldfyL7qSq1@RwNlo+{|`g(Vt`r=hmAP zG>On!sqIS2{7w18X_~|3FptwUPssem^2#&mpWqT~64iLI)aI(h)I-+DOBI#)<(K`q z*T#3qf;LEaYzTfgt?)B`ekGIE7&{eFFD~sIZOzV{XbzYNKK!-LgS+CHJ{5f z^S5&hZZ-1bkAz2Ory;S;5q=)cufa71wfh@QN?@E&iPzWjb_RI~38N`#6Cyk9r|hS! z$tTqSva;UQ{@J_sBb(A%QODx;WADfnVYsFpA}d)}({PZR-y=y+@rM>!ifz_U zYMT5=bk-4Tp6E(Uhtcj{XR6P>xn`BIKvi}BVsD|T0#gB>zN*C#8c&JbVUkIRQp03J7@jws&)ziwU=~#t22I{!G+4_zHc!b zLbg<|I&IPKuiwT5ty)N`>Q1|SJu&SH>qkVI z2XP03*fjr0D!h|ZE2q7cgwKp^!}|S-!E<3u=biZ~vhK!j)_MGV2>ID+TG&-R)+-gS z5Wi4=@Kx1wYKO+zvy`ioXJD&dKA0%vRz=S3`xe9Oo?8VIM;kEN$B}~~oA0lO>BM`q z&8mlPj6SmI`zgIyIo(U*d>Ula96X|(xBVv<0DIAX>pX~PJE^p>NX;<_E zcH8LT47pTT(kW-k4R45ynh~cw-httAEk;z|Vr_7g6dZLrKqAuB@BM{uzMtQ^<-JFF z#Cz~dUVVB23ykF9I!n@ETfI6vQj+DQOxCn2JsNUZP#@7bYr0!8;YEGvI~ONGBUnN! 
z0NuI0w%9&W)n@VAzH!s$qGf%eUZ*ShV!ljr8PCm>x$n;^>?plDxTmFvpq)NDTWFka z%f5O@n6S~#(E9*sw1|I-w`w;VCd0q-a3Y?|khKG$iyW~UM2^U~gZbvg(RLjkW8Bl0 z%WHwAc{q&yP~Y|5^sqOJ=p4-8mrg?-hs}f;sRCO|U%sipwlyNUdnF5ak zpRFC?ePs0N_Z$!axg0`|NCzE1;)npptAZQW(5Delfq0Qs0lA(QbJ5SP)&no?2D22s zNcs}?R~DPV&$a_0F+-)kT@&h%JJsWbb@LUy!s(uC4EN;@sFIfrsq2Q@M-=dnhpGR2 z3zyXe3SVg&xrN}JBibq~;_l)3#;X&SLj4r&$_^y^$`|C`hEvA|Ut8#=hp>V{DmJ~= zA&miSnaZ@z*4tnhu2Bxfu4&!7K~Xb<*gcN7f2}!A$~yOUz)^+UE~UlsN1U#yciq6F z&D;(vbVc%^kK|aA>h?b1{ezf%`X-@J;o}ve?&*{7=MNK<4!_{&-!vAYL1-64Sbm4w zSf0f0ofc}ms=GD0-9dWzrUH7qKlG}UU%rA<%~<)0m6qaBq- zyxFRZ<0&<(mrR4dhT9#yb-wHH!*iE-JL9#TW|iftn&(=ZK}o|FVpf&@#(gMK3hMH$ ztao^uTiFQI5Fp}kbTogbzP=@46dZ>g*h|lx{*mzd`s}umkv=?b^yyCUngWfR=sPzp3g?VEdhKhd0((- zF~hn#&4L%_VIiJLglz*?SJy^`^O}9f^A+wkOa^>gSGwRUt6(rJUGwnp&S@HC%=M0V(Vmi#98c z-;@;Bl|-5Fv;T`#nw-1`(sl*IFDd~yJ6PS!^&dKpAaOQ{U(&37V%l?ZoV4faCA&F1 z2>!Vdc{n<(&=QMsl03>jfzPX?z%QkexN?x_42E}-U3wM zad7p(u2oekm|}77%bS-CBQncsee}I%$$Hg$3Dkjvwv)5#ZS*b34sS~=*??V9QJjHp z9}q3GKhVNysN}_`q_CT;sbxiTE*UwYag7w;;a!W>DiVEB^JwX|PHVS@a8`m6 zETzM+*q~Fo?t{zohB8XmpX;BQ6ddHiUYFo#snOQL*Yc_krJ`7}ODp~Py5IO0gihLV z8S@Pd?3H;RjBrQ0 zXS71eZ18jvE|vFJ0>-UsBF` zlr^HDR6(ON>xU{&5P`d)gstxdKG7nh-s)>Aml`Kx*XGsJzAAqSwI~gO(k`t>Q2pkY z*Z`&V?zuHhlRjUlso7bpOlWdSXr^m>stId_;Ei?edp=TsFg zera2^M$~|g-5Au*_Y0+D5m8mbK->F1ukDA5yXYx213kUe@DsRi20kbhW-danI?HA3 z<<~(>AzvUip1S+h^8`9i!Ge?#t-<8T!k5Ukqct_RqmU_cP(&7x>x{6DKHk9O7&X#6 z^pEv951z1LeHIgYV57@v)Qy^B(%33TDOK6LiaaqC34>ddI+L4rym2+G)Xtz2X}?pv zV0L3nH3b|$AdXYT8Se#rBI5k9TG5n&aPOMFIc_!@@@%H8 z{XBLWilKI|(lAcr@2_9@fpOi2gj^-uhMECD)_3D<^B2jc!);K?-U6N}W zpQG?P4C=5?7!iLak4P(;h@stf z| zTcEdbG0AflXJzEh^wS6fJALZ8K8l?o#bp_}Kc>U9QO^YZtrKFjmX)O0Q&(^rs)7KGQscVtNsC6)kq@ljYp^)omS$X>JX zI{7R!lUB<0*5#qPN0Qm56*VDj+hJ(Uq*S?WXhDO!Kvi5kkIJ2~xVF}{=S;Bk*&X`m zW&P1Aj{e1gx%$2gxvBAFRdU044aN5baHvv_1IeqnV*5xRkgrniY{}LCw()cRE9|mQ zR@qF7Sx$|aisoPaAf6dZTFo4{yW_J=NkaD#7kev!dZemuN6vaz9)*dbL66;{Rt<9) zdXa!ARfZ-Lo%SIZ7aVe9ICsA2 zrGiB80%+L5zH8rR4A9pV-Rp@v*WBj=!~dyuOvq7+M$E 
zfdU=MK~Al@4w7Jqm+g1vwRbv}Z8J@C%yT9K$zRM?#M<4{h_9;-v^tiFw+rwho*@K{ zWDO+57yCp?{v2TQ24UwZZ8EU06CeZv(&)Xg6gtL}|Co(7mHUtku{6+}Zu3V98it{l4Rm z317@R!OA>WhkQ_Q*wz;7($<5YN#7GIWZvzWl6LMkxE~j;-+ttmuDxo zKUpxswxkoN9MgUuEGW(pKFBiTLV!LnMb*L7Y zA8>zNVFOmgB&E2(hmkz?u; z>7UA+4hKa4)uZk#Sa{iv;MCnPe4JtS@VwBA*=TDd!KK%i;&~w<1~YU+KS$UT+BOjM zk9XrIWAqH20_+UiIQ3;$cU0(`Iq!gUwZOYsm>zLV{t{+cJGMU$!7+yG8TQQ$`nuN& zoqgA0Au6;ksZv*8N}EsRH6i5zr*28)j>R|>2dry;KCeG!w;Tf(v_CTU+>HM}?R{rd zliSv=ic&-c6ahiHf`#6DQRzsL-a&eoUK3P6lqy|1NQVH@Ye4C}Lx2F05-$y=*6d*%?~z{IT&8H4$R;uU~W;b)D>Om_mxg%`{x z#kuMJ-hMaCw*IsE?jBUUiYHHIa@B>=g?~fR?;R6#{&T6Pb6PMLWQo0hf#qAbrRe6k zp<$JB$STrAg&r__b)!2qx9w1K>ay(n;?@#vg?|Qp|BbOu5s!J-UVPOK5g}z z%pt1;kZ8y9X1d2MyIp(jo%&i9HmdAoGM zhle<@`yc4RZBPyGw%4x)NKhNPWqYPR4$>OT>x1HK464}+w=moD8JE7s*VP6`%pZdr zo{#181^;fgFt;kXu|q_LREefdp02OXPjI$*)wzw7er8~xl^n2B|0U1PGS4O=lqF1F z%DQ{Ml>f8F5@espaip506P{Ry-g`^&Z+P8(-p|M{YLcdGl-V8rH@X(Dk~E+%WLkYR z#;*4je^bdv+A+HvCfe5dOHZVbeh`crao7Cy=bx8czTjJixG>ZuOkF`z{*J&Cl zb>+W_>p!>G8)8VMPmI8Su!<5R^cac#nZEQ|^uOv!f7uy4h(;0*n*Zhy`wz5O^*M2D z85KbP$xf*7omW9;jJI3nKi29@)X6nSKK_^7oqt=3T8Jl1tJ0VM6ppIEc9Ym2=S#K! 
z-Kpk}7yW}wx|tmPuLJsbt^Y5RxjteAzh+-Rq8d$^g;QjMkDV?$clZ-lct;e zgdgj2u)E>&*{-_1J7y9a`Vmf6tPY>qRg&R+^XghWvozNQ$e5R+FIpWj{ZMcYg`~;bFNvv*GY5K|4oH{>H6PG>MvAHjm?M zB`>pdgLmpMh&SKR6O8_JSEt;fvdvenD+e6C%|JiNpHy=*SUV{>nK0gPey@?D;`Eci zom=ldbrIMvm@wf6`sfnJ;W&r|P5kS}B=0mSIwX&5GrHcC8z^JVsuP8oAQSpQCe9n9 zp>)yq=^)=Y3A(@58+q5IF?ov;VTbX=uJo!D`wLLgLt>7j`fa{9aPsUqs-&27p5`VK zmnO+Y1ngpdD5?0uc-yNkp${C^nAvRo$-Y=I%~;FiRO$s(R`T>?2d-Zlc)5}2Wu!s$ zEGu@o>q4W158jL*5KbX7Fz4~y`bqJzt5*_llfy;B*4FJiS0=od3p*nh1385AB3~~) zWU_j7i}|lT^^95-eBF@zTCZsuj7u9WU98rnoy_GZdSsS1=O~9?mo~Az;>y^-Aa&9Q z0=5W-gqla)N%wE1c*RQed@$8%(tL~nBK^G8`@Frsi;Rt=K81XW)nupUdUtEjbog-N z9o`437b2kvqD_ugcvN$`w`g7^^5@Ys62j-Sr zoMV3VWEg_XIothFiLaGnBBZ8@Rni5jx2IW%KO=!wt zRi&Cah^)$2r~0s?21M9tNd_?%=!lXSaJddGBPLu5ozY~YR{JA8F2&_W)$4lx%2%(` z^Sw|QBe_LSMN9XWwNfFCKEGt?K%)roPE(FWf5stMwhhKcCBHggTKQ680=2+3L$8w946zcjlAY{9FRlY6gzsMm_MK1OeSS05?JfyqwX&4?t%R=aDh$@IZ)cx7`ojDK>y(CB%E{W zNqaVLPNZvBgK@7W-27QAg8J;~)u%WjNe*9rIf??Ag8NN@Y7nhgUt6%`39b-#0l;_O zbR~_np-wQqIM$iPxV|4T{dLjAS?uPGD1_uo#!r~Y`oSZ=dZz*yF;h?Dd^g>%gN2Z<&Ye<-8o_(s%%3OK|9{zETDnA!hF?XT%?3u0)zhMey z&ot0dWBW45tZT}xeN<}R4=JujgTAI)V*;HYgU{*>OVm=dil`bt2bRW`(FMFGVMvEXTo?Xpql{RW%Wtmqu&#+ zeS5x_xq%4uaCN1X#~wh@QjdDFTVrAmy6U>kp+!YKuPW@krqCX9huI=zinSvY7&l3R zpq|(l#wI+L_1o{e;CNm$vBd7k>!fa&R76V{?3^%v$d@y5*F74k6@vYpT+_vjFbnNz zG%_AQ0C}gon7{$E#;zPYu$PEe4~shQfs`N!1%Bt^BbKhT?auUotjylvxMNYTfp`n= zgUIcykR460RVv=h99|;l%dt#$Y{v(Qk$EDG3V#}}l|2~fWR)$|61U1amX&*VUwqAv z8V3~cn$|z{TnS!uwItEVoO+{OW4b7{rCw@qwA9A~TWs}drWFd)DAj-DNEpj~HFY7%2d+6C$72sQ+Ei;_uQvYdvB}NAzXCTVEEaG1 zX}j(tkujax`jw_i#BdCbzA;}3y+?dYF4mohWCIwtQH_Z!*bs^AxP_U_2j=L@^E;i6 zT$nho#+;|_K?6)tio}-qmo%4=j1k^4!RPX^jGl56?!q<2#l;%iw{jocYYoV`R=h0> zX=R@I>rA9(e%JDAmkWuqUp!1TywD0Riqm9M$_kkW>CT0|vt%c_e9_)K(-&c_5Y9Y$ zjzfmS(8o{h)9bzGx`d{#?p5Sjns~GxJhA%r!?MQeIxtJ9Cv%P;ACq>*E9@}VXkS0bX`8`RNd5V4jf0yyt=)K=E<9v9cV_?F-DyiMn4)ico8EvAuhW;egd=BD`;2 zktLiEE;q8#@tN+ldrA@IF0KpHem6MP&Ab?SOU?DvV*n z+zzld-C0?ph1YBIb>XM?L;Nzkh-&3Nri>IeA(&UGns{#x>UQO4ZZRLtN6pttG*4N- 
zBYoOWkWp*1046qgLYG^YGFQLZ)T8dZ>Q_E_4#SV)ppC*Fax8BqgGxh=T?{~grPCu5 zvL~ngSJOk}y=#sGE{2hw_;apZV$!BNqvXk0OpDsi?5!KouK6!6wnWKyGz~9JgbLn2 z*pNWrmkXUq1#_TFGq|>V&Ra-PA1=b7H1sN;a@?s>b=%pqr{q7)t@0HvBft1%u)qqb zR^jIEFI^SJfGbWe?`<)^27W_0ZXSi59+P+N9WQTX{@iTlYB}8}2_U^1u{U~87Pzt+ z#NX`V6@P=_q#7n$qA2!q9A7Z)TpCO|Cxl~M=V`ahF%^0}yYSZ42Hcc|f370YdD4vr z*&gCNM;AT`ExVyKalk^7HYX`aD1yk>MJYo<`iCO5jqBtJ(PW#=5cvqIv67EJH}Pmc zWh{4CwGv9|KE@h1`@cLrO^4Z~AsaQUnvX1J-a<*RD=ZC?fyxszfCc3 zpmE;lV;mp#+R7 zt4YP-B*&N2!K2`9A|s@qjK58nX0{t(d}>0?Qn4|1wcY;|0hnlP{u>1P%KiB??^|I2 zh-M{u{;XDSWa}kpC~fRjuAjrv+v>-dxhU0IW}mBpy{yGRsH zMDtZ!X2M*>jafzfT1mM#)~~o$@mMr<)9cKzm9P}nqon-sM?M#Cv%q<8GR~b{S{8_@ zuizHoY(l8Bj^C`dJhc5KTCxyglYZ*Gd!x*GjHl|d;ouZ~$~?}SY_?%O0-fo4X?qS5 zRT7-tW=Y}_3~B?f#`sTX8hB<|&swT{AF~vHJu5-X^%TEsQls_K^};`wb>Pg!g}18K zu{gMKcs{raVZ*(W2e6v=onygmqCL;v-@z#ro6f1HMS{VkfQEhz7lei$r$+c%=8?p) zr;_VT>&z`ZO8dddgsQ$wZP$3I3n1x@P!&toqw2dlGjK-ZfZQ11Wgs@3zlzamybjAM z!KN(-h4JSb7KDDy)@?39oln0-OZQ80s-dnC?T>Q)fcYZGfsbB~UmFETLN9%M4} zB;~v(<2cQjMa-Ku(uNz3$*Ba)pvPR_GVAEk%Zi7jIJD#0xjd^RrR zi#z^-!e4R0VZVm;m}YUM&{jh}^T*Z$c#nX7u;+yXTHk=#2T~;_VdKY?Wqo~pCSgqe zH`P{@Nmdx0{JC$^IemlNJDG%NgXu7`AT0Lj7HWlg!7u%;o1A%A0*=v@Z1zZQs*4lP z;0H5BmQ84cgBT&dZ7Co@C7APayiBNsiKTEDim|9RnWeDvysSJ~q6rbNiR6czq> zG)N0mchrG(DIjivUw~Gx4Z0w7j`rE_rvU9)tk*WazSGJ%|0$yyEb3S|rL)@kl_P<@ z&cDi~6R#ScYdi3=ZeIE#r0?wQ=8ZCh>kAUSJBz2{CEJyv2i-YBAt;AUS@GpE zu}arsF0zd>G@Q|1af4!*g-)sxKuhYlYfhU9@*@u|Ca^rZ@;W=|)liQ%bw*LpcDhQq zVS;>ULL;c=)cdQ|jic~V@F>3zqi`JKY_biE-~^v`F!-H_URD%wzg;SNCA*7SI^4@q z)a7TZ3O+P=83=a1Vm6cR*bkE2N{82gP9sZ#zLNfGxEGVt?OgHhCU`3V=2eJFcacrc z(|qH!d0@R{tS7y5MUQ0Up2)X^K0@{sG9UY;B!=(TqnvTqJCnXz;m*Qi^lzkBUc0C} z&FJI5G)S7}(UX~4MI0?50e3-$Ux5_z_)$`Cf@u0_6Xa6QTk+LeQuw6pJ=-gnjlSG! 
z%o(~Rg9IE;5(2(#*{6n9HkK#N180g9D~zl{r=_ASv3|{Ih;)*hd`@k9Ta_=9TdvPH zpu1RAE?nlSr|cP9ydl2RmwH23i$xsgzr$>)R@|?>&aq#Z3HC&!bAg+uD{LyZLn=?R z>QqkW@W_k39Eui;jn;v5%ZOS1nX1+@womMzs1-Q`wQn+_W#*R|H0JIq={|2%Pc6GH zE?TM|IXnB{V+3_d=6fBPh$O?tsY)!NDmh@!v!)&+8FJ!JiocMXkaR^gLU5R7pz9JW z4BTVCMTlso9w1|J{`im^fR)Z*izI&&Nx}T>LFk(biV;9uIaHYL5P_#nWSox(v5iPN z->pEtDu`^N*`+A0nD3Vp;5)u<7Zg!sGr+3r57aT4oPZ@3{#Na=95RTYNm7FwRI4$p z&nvA=jJ`l2RnlZ0QppGAfhG{$Kji7XNsYU((2(Zc<*1Pwj~2;eOFO*E!uf0Y$a&R^ z9{<2Gpu^kHpTOsLVEt!8N+?^)=L;7RPOOeBF(Z64n9t!sUH858xSCc%p%z2?tl9eW zcgOF^=brxjNx-U-Lwq;T8oSvMoCS!$%WW+^qB|9?9)sPoH1zwVn9nl5oyFtZWbdff zG<9bC^#_0v6(xcbD&3fGJ)=w^$#KvMXCD@7OhWz-F^;pf#;AQHosTpDsONjJiC10` zT6*@uvBWCwqwkquR1t)p7+qo=tMEzoY+xYCqHAsoV2l!#cYlQC=Rya^YXn-Iqmy*v z*aE*X8$}@6B6CwWxMXrc?ONLf?VTRrnS-sVy`~xp;zWcP3+HY*=7l&v+1xm2Eg*RG zAelCPM8^~@Q?+;k+{>qXlzyu%XSIxTIwWB@)A}U*k*(YPJ6%gOmuaZ~QOa@^hKkE^ zex1G=>&NRP)`zcJB_iG`J$lriz;~!U=Ystruwmmb?Iu0=Y@_KhQyE8SC@RJFRc&n3 zKrBFl+1Ch{i(&~emR78qt~(yLS~@%pJQvPA7h>?*h+0RBeed}hesG#KqoIw>*yeax zBfgL?o&1b*R)BWanA|J;M;jDMH%x2lR&vCCgJ6Aer~k*>?zl7iF|qDm1|2&1*RTB2 zCJS8Q4y3S6khfwSP*8`BXPTHN6LDU*LVNiV&0}*`RBd5`44WBd4B_cm6Z8Xv_gWsf zx}u}A;j@aW#gD7OR*~E{fj{Z>^gv?gMU(WbZ$RbfcH0 zZzVh$r>CDaerKK)mGNi?BG9dPfGMAUotF@bdgtHSlHVUPPEt)!A>CdAW93ui^4ENL zF#i6&7x!EGN)SQs#OIy4S#u7t8(%u9pK1K!{$Q>2`^Oi$tRV0J+>%agb*6if=8d+< z4>{V|g;nMz)|4QS7jS|iB)Cb1m&&ZEp(B&8>|jNMr_f=S0KJ3tf!IN zB3+7|NXqYTTlNvJm1zto#c0=LjmFr=v9ynMZuc)~HF93zVEae0wmf=ot(`Lsp z!L3B;@+5ZeTZ{J4&9CSQH}MmUU-$kh1Qt5t*qr4d9H@{%y-o?L#kpjVEBwx$WGHG+ z9A(x1(&1h|M>j;nrGr^<3h&0CN1{*%AAN1#ZoGawfqs0$T;_Z7ZnE~0jP;pMo{&y= zOeG_w>}Y5Z*7v9EfGPtAawKBapLO)n_FS>XCrGcFUTCUYG`VkGP$&Cpl2f~B$WpX% z%bF}=y_r~Bq=7kgAac+9N0vRcwT1pe`P=xA*{ZIWM%$)-pW(!4x(~Y@fHjVYC1407 zi9uwsdw%8hF$@azN4^tfbpk~dEK5IiH$00!PiFN6Yf~_PVp}#CIsIO_ga3K+d}gCbHj6iT}~W`O$jWY=6=xrZp#L zMV`j#v7L8q&|;fPKDMV+BdkjQ7x%@_AL-=*j9Edl0}roAvP^Q{4{`FwWNITFPzz4g6-7_8^hVC5EbLS0Gp=_>zI?G^(VhSPJHw_ zCSJ-g=o#gN6$h9PnlKmgmpG$-hoeXVdd!-Kfx}gEIP!u!1Rrmp&x6zLdRBeiuIbqw%OgSEn5&%(Yq^#=BD53x 
zX~Fx66o|LkE~$ZQHu?cRNN9UX-q`l3Z{*!HZ2Dfw$oTl(0W!-f0RVHxV|;N^P^Xj} zw>c=|!{>K_dNFH{-;s?*rM=0$ENoNnQP1-!^da)2boc#mg0|18{eD?d*!_uYk{&JQ zTsP;rrxZpT#y7#KPXL}O=<;}ka&D9Q2c22KiigPwr{wo}vpT||(zdp4R)dk+?7DF8 zXZ3aSUjR%WeK9&6#V&}~tH1V5n#cJf*)Wjga+ykkhnmq-I-MG@)LxvM!HX;1yie~L z*c_AviSN^*?Ge)Ky%AG&agI4YwP7jutydjWn-=NU(H&$lVf`Za^FMsG1fAmpBG*eN zkZz3lw&kf>x-ECyKxv-6(&N=H7UUl~*O@JbBx<5rWIE{XlWneneDCX!_wMBctRAQ5 zXxZi^AZwbnG6HlvNoeW3CZv14<$7N2+zD?HD^6CqPf0TTocU$MTE6%{^ zQSOA7p+ZPXZ7Z+E2__P8)&@ZDAKq6+)9p**kPrT$1Dfo8_6&)I0h4*^s?E{AzEb*gnvu_)lrH3|eP z$rBlGoY_MO(x<5yi%#&qtmN9VVV!Ly=kt@2=|^-dRBKX{`a#vRGm!m|ItU#dGvn)x zhyiX#C8j~7&)#w2w9@|O(qQivuNbt2$Hcz{SPWq=B?`AHA&#&%QuS#t6wF3wO3mq|T?* zhRnErdyVDj5f2dAfnv|6t%LpAU+kz{N{S5L1J0K-e>V;^Pj|eRdGP&_%+EfuM>nP? zA`ma}#~#@4@5#6#7D9mf-uOh%BzczGG+SCW6`@k$4A6TU3+`L8;{9S3H~$s^Vpn+i ziEjMHZv^&>?Lt&AA1N5;qFo{s=WX)<^O&C6TtC~Y9_!T7A2J!+QIE8DyOtVVGdprn z`Cl&2j~o`STtul%LmuR;PR<8mTE%Q`Pjto0{FqL4ITM!`uJY0wp;PJodc~R{nu$*# z@NVnA40l@j&9wnt%ZZ@ZfWuR_$}(V05LTX^0-yA% z#0KskXNkhkB=?F6ljUV3R?qv3w?BtLZlHOJPOgR7yt{hQQRqBaUI;(T(_Iv}!F@OZ zG4+WP(yfAuS5oq$G-J&lk+(CD0$YB!H#xSu@ zLM1C`f_JBW>(g8EkN+sMD*44qq8*hd>=rS#S}Lh`dzw`9k;?0^f_BuVr*!swrf+*s zKjR6tCnHrr25vXTgzstQ(dVMMu38sH+z%?%Cv1G4Yboa(PN!wB)wWs>e1{+r0 zyU^A6;j5z8uYe#vD|QCxN9G=j_P1ZeCdlC=BncqO9YS=)Jm+RYMPQ(B$Kg&xgSIvl zh&|H@^v!iOQ013F#a1k#BV|;mP(uajtPH2#ny`!<%{+((Z>vOd&@@B7E+SpN3Gook z_aZRY<&+Gdpq^@Wc+DWBIwHF)-n1(4UbOiwO3!lG3Z5)1ON$0qaga}m4o?*oz~6+Z zHt$O@)L*Out7g8*`yd%_bLnw+nO1%HIPRDBIMSn{mP5C3I(Q8j?Iky!ZF0E8ha2C| zw(P!2+w9<38b&pfXR&BFmJa!-LeA@&MaGvKtYKy>gJuD^d3%Q&jJkj5bFkEAZLMw> z7ee}Edq`UAp*KxsZ(=bUm`vokIOPCnJ7mX%tmcp~Y70bx{a>y&WEb#*MaEb*&m^nH zHA6nyZCkzx$_$uAd*HC3uGKb*up<0Vsk7~LtCzW%hvpbFID6%mI)kfmXg_4n?Rq^& zjNh&pGS_U@aqQ0Dmy84jjTo1Rz!rCRiUNwL3cH9;!GnttD%_=N8&7$W==AIA&3C+M zKfE{60^w_*U=ngez}Y=pndyh+Wb_MMwUIR92a z$+5>d`kmtfR!gqmdk8dNT`mw>wi1=-_~wSr&v){@>*c+`Pdu)!lY-V~B2D5~{ z5~yF)=nTEhsQAi!FX(I^`1Cf?7{#wx#?F%BA&5u0&r9XdSF?}s)cel6dpk$Y#n)3w z1=bdQ8v5vR=9TNWMKQ0a7s=Wu$143K`xzN|qc3Gb`X3X>-Tl$1T0FnB0)28` 
zq4?6cs*g9r$bCN+St0F4F^07Kj7v_IGZRTFhzuEhb4R zMq3|P-1Ha0?2s$n*(FPb-ZC`J*&Iwg@7~GY;L<#rlgbwQX>Jd{J7cffc{UF%e)k-x z)Q#i%GN@8!&tuD^CS+cd__PvgT3K4zbvBGNxs`nE^J!C-HsxDJE#1zCS`XpFlu`E* zME~4L%@pA~gvKNM;A*F;&PKh!gP9~J!Kaymzlv1*oX}sMYYs6PIFlL23pA%&J-#^} zYN?{v{bA*IE9yt|z+MOn@Hx7fXGGAjT)nJWw7KNQBx~r2E_yYzz=D3iWx;X6qW-j| z1E-8>tEXoIS*?rs`?Dv0^*bk^E;Ra=FTMrc>CB~YT=Uk}3KBzyZhn!%lW`XpwDUZF zG$%pngBp}5>500%R=4TdzP(CTXM)U9DK-P0hlCt!Zb!P3Lkw7qtz|&vo2OCM`||P~ z4|pGV;d8ns#G2FLb-wtG6Yewn_QwVNwyX^46v}&aWdfX-(~o48F@~o#o1HYPF%XN= zs@so7L_?&({5G&)&xJxA9oxmP=tU@b>9IN5R+tK}w}<>z=g<`wLVw-G+{}{dy89XT zr;wTZba z7bjpOUR|h!8J)#I0<5a1Y?~Lc=BUz=zphhkSK{tCndt6m^LJ#!;R;2+csy^&FUu5& zgkP#*SX$&;7=EJPY9YJc9}yiFv_VFI%d;lN-uF{6%^8m2*i5g)5Y`nJ5J!lIHMuSj zs&pjOvFC_4{3>wMCMcmZH*FU(h93KTNx}hm+cOWbY|RJ^Ypk*TO@>v)*=@n}v`a%- z8GM(;F^OlpPAC=A!I_Dgb3Y-6-JYSnLXGf{iz^Yd5w^{yeSBAqsd%5Mw%8tlSM|k; zhu7{JS>=IY9bqudFM|%_B>elcOu$w9A>L+Kjv?g6osAGIdAW}n;Mm~IzErQ5q02C_ z=azO12mUy&nX3@h$=`ayZN!~UILfS3+V^N%D}mMpMX}a-nr0k)3&##^#kCz*i|9Ye z(%pZ3v|@Yv#dwF;eU4EZa+Qe3?R~$xn&$?6J*=KnM~JVM>Bl-m+^ronO5V)=D)p2l zA`ti?{A_G@A)x&@9(Gsm+gdQ-(_YxPQM}5Fd6%kt+nfdCKq?CSfd*80LS9L$UqGgL z5sDy?Keab}@@xO+^E-7LOOO$5rq(U-@Sx7H957W4wBY+D@<96Mj;jg|Dk+p>7i19@ zy2uY&_`a}7zm?b@lqf#!-4aQK{*V`Da#)$zTog&sJq+*Ml?M$sTIES{r&DyeTtGVl zR({``v7F2jIlVVBjQ!eJ?fg&g$vC_1dZ-PgDg zds-XKo-6RpsW)(a>@8FKSv1X%Vfa!6n7N6`T+qC78ap)~$AyWUFC17}gA!vQehp$; z-DgWhtdD|fOMJqeWdRATtYXGBAx})bva@YTHZNy+W`gQ5na(U3@?A$R&$JliKkOh} z9g#N}7vwX}W@mkCqxRYzOuqW{-hPG{yz^l@IjWlt7ZE-?zO?0YbwQ7}Rz5t8bRm!~ z)Gnx%)NPgD#CJ+5>7KBhI++15R`%KSz`9CH(M%JtGCd>a!mnd@9jp64V=p*!Dia2#4v zV2tXzcn7!6PJ&CkPL=hCNmg;}Ru25R>MUD6+;4$R9x*72^ymeh1f_F9J?ri3)}yVo zEBobaQcEn#l(Uz6Wulg;7vg1nVVjv&aq0;YNIycK43>Ibqp(_2k|37QqH} zYA{PN&%1ZxVdAI-`pRMU`9aaHd!qn+MNX3R*xo9hweGBz_2F^|xlzCLo)@J-z>np0 zS4=|!oC+>AAo|G%K0Oka@jJ_4mu3G0+;Ag!`1z}5xirdo_s@|~&W|=m2`!Juk|8s| zuF!+NFi;XEX6S|VfcdYznM%jSa0MXDi+_Xt32dwu>bYS#)J@5oqSJ)}b*A#r zMpW)?b&~}yVsd*;@Z-bH>>)FLycQK|>ix_+ z=sfTl&?u`j%kQS{1E&1UliM?aIPJjgmc=0Q^6&jttAR^lvovIzjz-71ii3}P;x+j? 
zHY*7u}1&=A$36 zR5P}<(@lwbUi)c;o~;>3+)?ak3IPzd!2=Yu<|pQ8A46~$L%+E(al@89(sDyL<{ zU*pd*Lpilmv-zWq>;{ zbqwm&SB7x}viMJ~4mEC#ePX|TU}P795-ie`SsH(_Q_4HQNEO;bhh}>Zb@TKH%EwqU#p8)J;oN(ACXe7)OVc9+2FmDsYMAo*W3>&bNbBkroBzA0mG$ zDO^FXIp$>Zv}-=52?Dmd!1!x>Uk?TKXpi7_kOoY3J0tj%cU@**xO=w+;S^w|*b4g{ z&U<|740X9ZIE7)-x#!fu<2fhlF;r+PmEl&7!^%Wo@|ueT9i%C{hq*a zi#NX!k==XGDEVmlEDuD7N@-$c=xu~Jakbam!GeeeD1&41VSKrX@}jdpy@#ck)07X z`M=rMkte=uk@Q+MM!V<^_BIsJ(QYU)YR_ovl!nj9^P)$L5g^NN{yfm>fS{M4(Lx1} z+o00gUhsM`-pv7B8po6Db$~>yOWF_@p7@-a;SbbXiCEXA7L0tc-+|E9M z61BG5CA=GGfy{!H74-Gf@3|ICbF+lK=)+(6x_~geCeFW(mfk$li+`!7OlvXGYIw7u z!(|uy)#u|gnTD(j3*~k^3T#$qauFxB(xM!{5{zo4b=e=DED|q2ws9$CD!%p?Jfv`S zn4HJ@^zjmR*(0+Tp097S@zbhpADH)~3Dq#=YM~3k_dc9ptKqVyy}+xtS^50Z9=Ihbl>`U5uw{AS+Z!HZTE zBAk;6->xv{t5Z!EDP^BI9ovs}m)@4wqO&@y6caEn-xAo3Z_3y&qKYN5?g7(24D&Vb z3Q?qPzh+|BQDF{KUgCK@MlryW*;$f_XmFmZN9a( z_9i2?2GH;#;w%yRdhl`Lt4%Ti_0t>o)@srKicG}mX%yNHv1QEttZ%zzv_5R;Tdo=y zeF~Us5S=-UUVkXRd2*(PowIMuJ(Hs^-$A zjzDBg_@}7<2Zk`JbY-sn&S;<3PNmYHVBdfLwzlM8({;C8V%mxMFVFsoLj3XlJycQQ z>r!ZK)~wn;g~LB@vVR)QsNV&BQt+wUn9wjQ>-_UM5zV)~%a-oGjRr;@EwZBfaIQ(> z$-8)vRFiuGU-fp+`Ioj7p3>+Pyf<8eE*bux>VI^V_Wa(pLdL)CbbmGc&we;PymEWa z)Y;S3?EmzhU#UNr(fv;iSN|`^0n_R+dFj%XQneR~`v0|1*;R d|Hk1Y?0Gk-Q_-~S!%M`Env&Lw>SxyP{}*j&)EEE& literal 81389 zcmeFZXHZjN*EXssiUmOt5s)S#(xppBks?SDq?e%dE+zCLQbc+SC{^h_NDD0lMd`hT z9+46tK!8Ai(9Y(0=biZk&Y$z^%sG!UWU}3pz3;Z#wXU^3sj0}5UZuHu?%X-j7YfhS z&z-wibne`RHj+!g|450Ha-TbQ-r7n=M(u@+45ONpgSnNh*|~F%V!$!4IAq@3ZSm9R z;0=wAuP|77`(*qx7u$Ggh@RZ^>hiJxpR=e~tPRS9LE1i^Hh*#~f@AKP6Kf5#lUPq( zUwFN0&)$Cbk&ow*D)zjaG_2@@d;mPd1e$ZJA)<>5G)`e(yI>ypX7Z8lx{D{ZE~*mj zGx1k_%l!4EW`ll>=+=$yOC3ACZH!E>-aemIVt(;~Z8TShoQ2)u>HHEW-I2S>phVnXOUFPiBabS_ zwYHUnz@(oo_hqj)laPG;#r9n1ARUFB8jNHx&QUKM;!Yk@C+@!A>)1EAKhQgHWynSe zwpE*D)N1$ePcf)j+Edii~FCrfW?1ert`vFS@|40@SWt``HxoT zE&|`q1AjEY-??){Uy1(re=Zh%z3{)^FSH%sB)LE1dhVR`xfjo$zIH#qJb9)5<t zUPuw+cQaq@ggaZzcl!7+kag)cpi{6OzSyH9_x-6T?6%im6~Y zgWsZ}Vx2LfWhTBlOZF#pQ}YJB_yfbbEnX;c&ppAt=fX1c89!zGqxuJ0iRo;75s+&f 
zMya(Z;Z2%(zx1jWgmCgLjPIpy+;R7r2y>dMK`WR)<3N@wVtL>07(-((Y8mN12{}D5 z#y`r^x#!IvR%~1^+@x>FxqVB=BMyD{)C3sczgP7)mMJv;$?o&WAEN51a@I6=ZjlI7 zToo$nN8i-hRYyNL>oFkz@MndcftrNI>H)d+JyVtYXHHsPIw1dY9X_EFGpsa(>xMW& zBY17xR)$*)QVgkdwqfQFbn%rUaoG|1bLTITOaJ>pqSwe@(NDXOcrk#qa8s0sL-Xrd z!!^}y6s+h^HhWmqkI`vTq&#!)m~MuIg_UDxYV8=V8wWJ9pLtFvKOkG%Wx2JC`E%#Ers+s|M$dvR||`1%^m`eDcjtUJ>zvG z3XHv_?X|k4-zfbdEG;}|4Lg|VN3iBFX~30Y;GF-iJ@Z}XFYaG)S@MP!8N#OaiN10d zlstX?SoQ)G*hwL*qvNC~-)bfABDY1Lrs%C3XD%bf6YPX@C(pS{w7^L>l-Jv(k^4=; z;YUc3IcwN;=a`Frs&rqtdEx*ZU+!Na*V!GIzIWDKMuZ4_5g|?HmnnJcC57^k))a%_H*)(!0;yHDnan~)YTa9IXzs;uppp{jP&VS+d z6Yx@ec>3eEef@3QH&gW&OYN&0h=o5t0?ItQKt`19@!TwC9SK{WkpZWVoU|5k z279Tf?|$HQPxI23X&Z>5hQ!X#yZN2Fo+aLVIGLKSdX=0gp4>_4rUKeA@ET!^k|RDb zNUD4L|Mq$2ePGf;`^6Eq2X7!jPCILF7Dv`yM@tDmmBGj?%9W0;*Z)&65uqxHhJFdZ7d1vV+au5Fy=$q^?FE{Lzn9L_RQrgz9{;F*v z=+`kjI~6HhEG)7)K2HpT$rxBfliIbZCEr%KthyB5=F%I~ri?sG$3OR%{&k^n)VX1u zRv`ZsG;K25Aw4bj^XG<|8mGBf6&N`F4pJ}?qYsNyPKNE6rV=69 z#6>JeY<>NTlNVY)TiTy!zQzOP%UW*k!-5$uPBdwWe37r7oz-FJrXa(tfYVe-6K6yD zAsXwC@GD}b9SK}zen;nV9YF*LJd~0{0cWn9{o#9vhgN%YMDi>ebf=@rZoJC^IO0Q= zGmdOV^`EJG4c(`Sn=LCivaE-254gFz(V4!4MXKE`82> z&!jx9t50EqdcL}By4^&j4eVx#@9b9nA)VB_Be+pd&|l-M37J0ERsVl&S~TurKdbZ` zq0d5Hdq?kKN$a!kS+g(!+q!j87H}F=D9A<7C$Cn+F35b0 zcqe8sxVBBDvq&(VX;F>sL97i83xHDRJz7>^)ILMXSE;yX@hP3Ffd%OYlW3G5nc@*E z{nqF&!#dYbxyhQkC=oHDLWMeg>}fUqgCwh~_}Y8{`Osv8Pu+g?yO^FjO8y)yxCY~z zj+RheAjufFhyTWv!jDpUlh_3Xho*WgnFU!i>w`28O9psuRauT|r7`evHUY*UD)U*BxB@Q%ES13MsRM19ug_3bGPr8vdHf&<80SD1M2V#3UF78WXo?Z!G_ zsP3N5x$__jTdB+5GbEZWppi>rrHkS>8( zoX!2FF%|J_hRtY^9tvl)55}VveQ0E2n7hZ|uHfRLrWG1lMI*8#>#_gc*T{k2qy5PA zZ&sU853#dau0Uk^TGi&_B=hHv4+VuF-%SbP$7}i$y_I#HN2@k$c15%abS$5JyCcyb z327>hWLsX&QvaHF|B;&^3nUzdArsp9hK0ei-E{ju$7|ieV3Qa}&=H@rD#*@7;&5c< z0-u3+L&o=Ng6399=VNz|T?q0ymGm~JsMGvhA_ym*Vvm@DB+|Y?MP%4$?Q$^7NAObl zZ)>xbqZ4>QH+x$Plz3`%mIh1aws~oY@Jm~zDA5#g=J#TLS5EBUcSiEQn_7oNg`t!_ zes)~9)@X!=_k;r7`bx|iP7uV%ivFl~Q6mOFS7hRu}B zB~$VADl>5WZDzkc2*vE`|MK~@_N_k$%+Z!Eh44tdWMBgFl$DF}Xj_pi5wjUCis|5y 
z!lg9$cv3T$cqZlg368nR>?Io(j*Zt=jr6mu`X`~0moG~R|0==SnXHvxnCli39S=h= zNO%rfqIEp%CX%NLl5J9p_27Ekd0JhVT{XH*WY0HGIoGL{{MAvbpYRj*BZU<>qSm+s zF3I1(q|i8nYE^OkLes{d3)?T$W4CXD_@#CSE#h3B4-IZV9RCrn`AVipRVtM`f<_Q0 z%BPguzcP*hh2G}2HN0a<3rE%KnvFmCOc&n4 zoQJzx82sK2wly@eXZlu+-7H3^fTOfj$AbPI)IKU!||s&o+U z1x`$ppx>a@q7!p9Z~-*7SvYSQI}_6nj1_5S8uey8(%JJJei=I1ZKa>UWyq;73{8~M z=7t1aYl!2KOlHEu_abl3iqC5y$38_xCEj9D)Tvwajf)8Ck};Ir^G@i|pPHPDba?}@ zncKtGwb(sK!qiv|NIFfDF1pGc-0S~nHqe!_kfwZl;?BP=QR6hxP1-lvnST2aX8Fh+ zF}eij+BL5p`pE-HOqA5oCUyPdLP42X2LXPl7!@N|0`cKegk~mRrDV4}EY$h_0mFjF zx5cr_%9nPf?0#9;lmX2R=4&*lm=SiOq_{UdRv}obC5|QAn;v98Hc2 z@}Dp+g$a*NXLzWAN>4D1>gxB|vhf?$JsjOuZFOK!kovg)jj7=q#iY#3 z#+4JX+gk-j>kAyI{Z(&fKtd})XTE4vaJE0 zpI?bEF?k0rIRdu8r{oKz?Ra@+1C)4PW~*o*SM}qP7B84#!lXdVgZdxU$=Gr&kjHNQ znU5yilMk>`^(i9GmYtsy=d!{>AZ_w^1DU_<1<|Aj8!gCM=p{D?&EC>^kls zcO|e`(|q;SA7j^e!mmFn>KDuabR)}9A;|Kuo8VNHr}RWoT|Kr&4g$6Et0njO_AH2R z^0psHymM1o#$`HRgY{r)apAj(Z;!>Csfue)C@u0t2>d)(`uT-I!WL$iR5-^^T!wxL z|31afcj*0Aj>a9OCOLsS{A_5RSG`h@ZR9@PO)8B8^~ymjrqswsflYdbp&o8}%{q<(-Wf`CxQ(i% z;l5HL7tD3wK*-h|Tx+5$p;lXg1y=)BgpE~|zyPLLzzN%qER zXJQIb@p0iAui8DdPPc)p*m9oUxg%IPlHMW@CFiVuvqZzC%Bl4kR@42!Pke7@ zEurY*0N9jgc_noj>RLf6Pqq?dgHYK-*VeW%{c#}ML>|>+Xchk4)ALWcda3bKHiPQu zb!cgg;jbNu?Fsh&uTOo-i}eozRfH!8jY4Udi|Qd1tSYS^$S<>a;a9itkM?xR&6Vdm zpz|YpCA22Z;evAhmA64ZqDo$6(*u6F)Qs9}_{Xt@*FO`uN+9=TL#wCwipC)v+8ev# zE7&9WcEf~+JlX|3-*fcT<(IK*f{tsCituX02_+85JXz7wU_KXJc;j4Clq5bs^ZbiC zT89|gaadd!cY&P>8;4QR=LbL_DWzQ;`@OlfApBW;VZAgk=Goi z;nSzXBLM-0BCl6;FbiEVq>zKTUC!R5=}SBB`BAi4Z)5HQx792cG{|>O@XM3MRF{_G z3(6%{wsvx}+PRav90!-h?MnD_d=iBnj6~emzSi^a8d8he1|mA1IdLaU$`JihKA|G+ zG5?W*kPO&d_jUcssZ-jEdBlQzxIhBwj8ymdyDo3wK-q6c$1=}R+W)&nzrvbhl3|Hm zIawf~tl6c8OU@G4@fTz^R}U2?&e6`}9TtN4uN6Bx;_dg<2}wTQ3YjuxJB*Bq;W^XfOsN71Z; z12yvKwJMi9`s=ja`UESyA=*ZCtOTCk{vs=ieDjyW7405q!i}N|m8dkIt^!SVZJ|45 zQXS|F^zZ>6VTkQhZFC(X#s=4r>VTRbwMY}n70|ovB2g>?>e-?m*{^g>oj7Wg(b{_N zTPNx6C%sixuHF{QlJ%SIFtbRC~*QlZA_7W2~oVZ8km5XWjOw^Wh9V9ZXW-~VII9%NIY6|B3s5$IID^T4& 
z*16^exLo#Nmj{C!JQ0p!2@pEN6cAN>RB*-sHNVEQt?g;-D6nQ&xX8y7BBD?9ib99Y zrZ9nPcD$xsR{ly*#zJcW;57#Q)uNt~LAXD8vrl_INb%4<=gm-fRBlZ~N!b#@_M)_> z^E~i5_lV*=*;UOg|kNUOIPNw7v9O53!eS)ev&=%Ve!2 z`Q|~o*Xoy-TcA?1P|a;)y1b}=$MeU)KksT${Ht7~&-(7mn!1kEFIw<|DM;Bu%kCR$ z-MKLZx=Fs3fTK2_?Wq;dCx2EvwT-ybyZ03eXu~SJIOaMM z-o2y7%_sSoyyRBl>h#8p$O#i4Lm5)rs3>yv4ku$R`CgJ{huQ4fH}!J4t?LjMc(##5 zT{3fpfIxxTH}7DiseDjY)`OXA?yBu%s2)LNWxuQ0YZu=2V}FoAC3y*K3c+k41?(Rn z_A8`4U9j_>PbRB;Dc2A3HL_H(^cf(&PTM{n4sKBGlct-pj*boK-!yi84c1ac<}|3R zd-5!+wSCKgA=MFLxZt1=Uhy^$_wO#h z-(|NutEISHyz=w&%giuZqm|ByeTS*Bo_v~r|NhNNSqH=Wvz3xT@PWm^FKjJQ>Cwf_ zaA8^yow%uY#M;jKOh(uT?UL6o+NnXz*TEtH=C?dnIqseAbF>^cQRAH4GY|usBBBNL zpkRIo+wofEShv-90LxP>G5DmOr^W;Th~e?PFJ0i=tO<0@C^TWqFPVD@>X+Onz{jaI znYsgemV6>K*}wMWVd=&ZP0CsLZV2Uh6t4R19H|G{zXtt(d~A6DDitZHvpf;`9-mwC zJ)(VmxpEz^C zrS$SOa-HA447lLa_ugk-Bq_Q#WGHYVGB|!gh|hVV!V#>=6#2hV!|}a$@#p*@nYCff zC)})K-TIwJ+L&_Br<3-Kdy%&oOPyZ3_34~mGDbWAAiqUKdB~i;*Z2xpvB)KEny`Q6 z{uoO9)o{*Vb9tNB;fz$w6UFz9?XMhlV4Pi28+ z>9jFX6ib@P_p83xBG8c@48mTZkEPmkr@2f#W|czzN(!vcsX z07mRGSO^+=dSRYC1=RHw_;cFRD%t15iov%Mi~*OmrVKwU~$@? zrgjycF%}haz#2W{>61IXWQ>;q`#txydtUSOSkAMO0JigI!?3p`0v}JCus!# z@w4gtLx{D+V&lwK>mBUcHs(6ontZ&*0fg%#8J%lk-BBarqE?Bt?HO5a-UsA@8btC@ z9xr@BWdgyL9+amw**X7ClIP0C*%8<&cp?C;JmAOv5IiZm$9_BSee&;Y{!{xxc2oCd zf^Q_EFOhAyiZ+=jC343N*&N2s&Zes80id0BL(w3jAZ@lQ8M!hBAi^EXqaYjIB@ z0K16g0ZH!SdNj-4^{J|tJ-<@3RKSs~^ZhLzd<@Nle06On{32c#>AWf_a0?pusQc?j z+Y(6^$8cx=5rCB@#K*tR*T_v6EYZ~H7R7M(6r9Q5swR1^X`H>1w23#h8m{;xAn+_p zx}zQ9r+}4=YAqNl95BpyIv96>K2gjbYnm?RxtTZNj0(UX7WG)PHyzNcx*hznxZFNz zxEh}b@=y+jKkhGq(t4thMsys?$p$m725t@ePF0&r1FAO|BvV!=>|nr(e7v6U>sMZ; zDpKn#mQ4(KND`Yr)$2lYgkO)5j~VLl0N;R!0Pw*C8~q9uDj{8@rRC=HqukldMgAh0 zMPCLl@ApVdx+j4eK<1v;mMS`Y-Cn_~@r>5FLB|hV0*B$-Zd){3R=>A%e2oKb4jybF z`V8%<$^%ZDO-4oljDc_VTh@u;?=tniKd(BzCmPhFO>KTru>?R1fdtJrmhtXMa;c_(rvvMB#Dc zy1?sWtQb^;FJ%4>^P@;rzx%Rg?ZdT+0;)%9jZ?@N^`L81W!vCP_w@?sWj$7vR8bbG zgZ;2djv3aMd`F{~>ur~X#*~fch=XNLtwwa_F+Hh2?U6=p#dQXJz4%}zY?K$(b@Vj! 
zjxqzeE*#&+n*J&BSN}VXSXp^J;_+aa%L8&C*|HUJ+S7+3OmSxV~BER?i9nvOj{gnCS-iT))y1 z!N*lFk%yU3-1_3Q$^)FY=Vr#)hsQE28|I4_gF7si&Dm!r#fOJ-u zYH|7W%1AS|-!a@?j&6>iRZOf~U85FHMXv|J_ZSu)7VA}#lh#S@7$ZLjl>sqvR@E|N#Y7d%QS~`?W+8U(_}dsjj1L%kweJ^5L&^l3CcaV_%mTyb$ONb?FmY8HwR+QB8?{Mtn^H0NGi1 zdG}<>6%fKbXE0sM?vwNTYif~Kp8<(UGkRXUFLj;Glhg9bzn;#2w6gSVATW@9%G!JJ z^qD;8F8~Bu0@tcez^DJY%#a3xqN}3*4^QMK{&SfHL?YR`r6;Hc|6$RQ69a+WtKKUD z=g&G#%D_XrK5(5imPbJ36Z%2r>q)lBe`MbZc&ODKTc-cK;@^+w{y@}s+l5H^EJsFx zg0WPw-1ypAW4Q`Mt{Gn`EQ!uqwj|)8NyPf66c6z3S3qb-Nt`N8an`c+U%WLSlxc$5 zTpV!P)G4?pmT;d{U9~-5<9_az)a|q8ZA%dn7L^zn_@oW;nbE_ERdGuE6F?zPwClmS z%{*ad6kUNntL{z}1?%0Q7v;W8#*o;m7Kl;yxN&;ewMaILRTE8Aq; zFBNs&&)P3P3XzCqXJdOB>%;<}&sSnU-(_%(UpR{U7Ib=w(w4+7k6&3YvAuX9!p5qu zKKJWW-24TXStw}C3EN+J3zm9$%Y)XhtgBZW)AQ$TOqzC|bzV$i z(u(bDqes|RBH)cqb!Fmfv%K>9X4}qNhc`)!BJU~3_%ye&b9vw%QuCzV{$3Mf&b&i* zdVcTUbA9D_K_~9=ERtU04LyhY4yOo*1NWY{tKIWu4*!;jH)eY`?g^rlnhHJ;J5gwv zY4bRr!IfsDAgW!cH5W>nwCrO^jigiU`jz^j?!jCCp*JRuU|x06)Ju6hDo;-%+TzE`3@oZ&r@PZGV5i;Vk>c^PkNCN?_s7x;zzp?)5R_Wd`Su3*mXGZ2So zT+#6yef8Opl|aJb@WK6CI;oE(!}3@@^=WEJt({@MpTpln=5&DZ2tK**5AnVA>e|5h z@nZ?= z&hmPXJ|ku!sRr~}jbrIqjx4zqpd%Hw&z+^0$=?C$#c9U)OvBj^%pqp=eAY71>_v=PhJo|$rV3d=SDz(wnqfeoux+17>sWQrX7eK5Qr;jO^u9MgSUpa{cc7 z^YVN;(TX^|O6!Z0%f|uR+O)}R@3*F7j&RL-v(Qr%^BB)(zB!*=475@mh|Ly<3iT_5 z6Y>9A01*|&`3r?}9ok8$dH{JN|8@SeNQ^O(N<~>2vMNDn2Q2SOlSL{-djs9Uica@S z^WkK50LTOizI#5O0J{E2jQ^0-(LOW4>9la2rqnh_;4U45dqpje#Mq`m^|lNEDw%5S zsw8-+!h2(pY964eIME`$Q|TORbU(}T4xVN_cOG@L&jzZ!o9aGA0n zOMGcLsYuDI&xMs-wx8QcvI!R~Eb6c7+QNtM)0xShaojUx0V&b*ynlT~L0Q5|YHq^0 zaAe%}FZi|=)6s5f}))-~AgueY!RA&_dB9Y$!>tmxY7YBi>O zClxawApXwll%Jt*Xd{mtAgtCgbWZMJvG;RTQe(|Jv|3}cJIsdCZL*$!dh66JqB%>z zKq(BJJ3xJ2fREX1@Wd0{J*i%1>Nrya>9heKupj|0*Ltkeec3~U$}!1({5Y1Thr@mq zndj(l;H=E-JiaAWE4^3mfbK}>(l|2b;R!WS&##7(ujW(!p@~}}!h+3K4i`wnXZss3)y0=gd z)HZ1qm&Db33_y=&35~$9qQ)M^?Qe8Al-Z5nz{RWl5Ifp02!49ug@VGn;xv2xTwU9Mz(6YudL_WANbM^~X3)$F=9zsu&GJ1?tjN0;!zj~)M-?m2vY z8sO>FCtK-b$PHi@H&@29pumf4E|qzK``|_-qHC 
zPYPDgYz{ISKbVPX_B2Is3HC^A_8w4sdQCkjJV1L$;ZNvVBC3$KMnx)!?@>V1#O=*O z_hH^tA~cOoSCZhk!FydQdLD~?vUOGhnNuAeh?npUQ(l94iDut!K6g*pMe#(xh|6g$ zhPg)5Pcm$Zb15Zo!p3_WeDoJByo(*mZ{BxL0C-XU;{2BH4V+#W_Gmcm?_8nY>$l-^ zp0`-+Ui5c_B3PhNw3w<9{N^J6N@ZbDm_SHbyK=s8HL(^!YVhNAYHpD4R;A|e{h@ct z9X2yl@#ha11fM0_bon5UsvAr+nCmg4h3Vtv+bd$mTF4;ykfPGtw;8q*)v`u@CiOMY zBi;HPVH_O??KDc?0e=4@8AFWyq-Tc^b9hC^tr(v0hP{PMYPq{2{nziUI5Um(M$e~t z)zwy_YFFy8DiEd~ve;{dAY(eGy`irv<8^Ol9>QkAdsXc)U!(W)w0cGc#R_GL9~Qwzoh4^QE}S9wAEfTd=71&j+0Bg}D7) z+?OqtyP5F>vb(c)w5)c(5xj2~`jvR9u!7*Fj~i273}bs!LiAqCFZh8nJ81o1r+k1a zR+kiP6D74?aZlX6Rnpw<`rWl~eVuG3{qZVf9r0^bDTxsh_Z#|&3I~NtU$BNhSSEVW zy7YoJS}@E~@Fn$LP3aINtru92O$!`iMo*K9D zgI{{Hz9u+y%nx{t%8$YAva#aKomDuN>4nPFqDM_<`cXjmJ;C9sfvZCi#zk#{5uH^F z4OLM2sE=kxhy3Ti(&Mx%$SM&&2c)m_V~1fS+q%^Y7Gg#HZkFy~AIDUc0%j^1J7^Et zrbA0*@wc&V>!Y9ox`}^Xh1NDAUZ7-a!nr4gDpZv!t(`i!9IHQj_r+XyJ3Bqg#8 z>xLKQOv8a90ue(s2c4>yRx`e;$pcNdZgtQ4iYhD1kZ|#vd*6pm4*!_I;~7Z#?Q3@B zJpTCCt>hbuLS6+#f-#3eO;_p6oYASU(M0TMIr2t_^-uv*kCbq)-Rr*1#q{>PK9erC zP-p`_L7brAfE#-nKuAVtUvBzy2l-8E}=i~r5*LD$u?D#G%qB4Ain0FR<&K3h7MsOCEL0d<=IUG8T=;WwP^ox zrGL?nRW(f@$BXNxT|j~^s5>T2qR#vYq~F)_9Ia>)rca)rkSxTtV&wpkR9^|$4muMY z@XHRJxZxF-f|<4H%&z=2&!XSpuGw!N=}QAdNoA(oO+H9Gv-}nEy*UKVG(1-}PV)1T z)bin0TON!68mSDVpg7o1f1D;BP`C7T^i^*E6 zZ%brSobfI6Qn(>yy7i%a4S5qqYht8Qc?I19N+p5*@7&)ae0rk36Uo1BFA!1y8_7;_ z1J5E+>jp`-uVpgS$)-#awz;1w<|ZKjIhYsa5K48z-GsJpW_!XXx1ymT z$WXYK)R5VF^K;_L5>2Z|$s`!U+n-l=vr%VGkohi7)8%z+u=(LncFY)6%XPQpsT_^T zTB&%UV)dqSK(fZ+{KWb!vluzP3*<-Vs5dcSW)#CVZn|Z+J{#57(W+yi7(l9 zLI+RX;J*G+e|9;Y*5WQd(rrMwyvPVXJl0u-dU|26E=ZHWXoy?lHj&^fv=ty4QA;pB zbbYovQG^FQ8vhCZCtmPR;Ute^E-LH)R;<5w!_$R5{?OU*s2!*KJB+fN?7%LN8oV@E zwrwap26NzYbaX8F*lCw~D>pO3C9o~0D|xaT-JM!hTFpH*mG|lb3|E_(8P2Eg%XjAs zEHv1tK~81zZQxswA3wJ9?E~vRZ;~1s zG`*L8ByhVgXvA%0jM)ZR9M>at)Zfqv(oru-!Tv4-yTn}5A_xi~oqiBgjnp|eq;dE; zj4%q*4qO#{@<^*F&xgC5*00#m0;C5k!SXaO=JAp8y3jr3q7||gcve-h_;y$sD2=hP zBODZKwCt6~#`Y75I{1H{5^$KR$HT$j-H}e(iI-o>BTdrAc_KZ2+{>TyFxd9_Voj3$ 
zG%_+{+|L*>o(%y0Z?L&JIqih4Rp!#7I6{y@q3u-t@3BonVb~!?`w}pCv;Bv(Ao~5$ z61Ulfn_gmKLN1=gQ1$j5>nxzK-U;Mjw>+Y~5@W7K*8?J5Sw%p@Tt2`({ExMA3;oto7|#9(6WbEHkseW^&n?1DXFm4axG* z9h(4f02R}xCe1p-pq~>L^t5Gy!ym)I?I|ixZThof#vd{FcHycAfc28Q2E!{ zPG*G#`?x4COOy2IP3G4A?rgic_PGO}0qcvk;4z^114%cjQoUS#NuK*-*DL!&xsSUv zB9%5s`#mAI36FI6ih~UD_Xg(}j7HH{%FiFk^TKnur^LtayuG*gn2Syi8l$I1O$owV zlw4bOV9m6+=q363fNI$}|3t9)hnu`0tMQCE{ZF^}eWE_by0BPpp=mZa z#p@J->VZyh{3b`>@Y`7er_cP+)L!Gh8&<-N8gmWor0>NRjlU2@hOu+W(gi9)BYHRGarKDa zRt)J+mr|d-Ds{vm#5Y~*+cOUYn%R->_5sx=SKP~gLQBSBAdLU8&}WfGZT(U6ID8YG z9DZdzT(9OL&_8OCZpYmy=`aOg^BMsG)i!IIU{aQY+@l#nl+jQ03vQ_eeWSfoZSSax4#oFd9iVQKlS3^<{!c z7Itf+Yc&(N4Oj1IE#}oUH|v?9Q{779xeXOJu_{U}k#6QTkF6_N%MpUs29^c|NB6by zSSj+&rt&err&7_^{?T3MFV(&VRv&tx9s-}M?gtf1<1=z zqo8=QFB*g^-#y| zmG1WR2{4M$hKQFzM;DZ+U*%oXbM7Je&yBf(;qmnzE}K6otx)tAekp$Wtj*Pf&jQvim9rg*+9h=*UsP{9 zu)3svBWN44UGp_C4OYK!Nt{_5WQe-%4pe$&WmrRlz7|&)B8!j=VJr1_!4_uZ| zT*9bFfOWmaE7_VeKxxr)DCc4dNdR)Cq@I1?1NNzFO=)pnSd4k3iU+RvQyEaFS5u`+ zA=2@9fG1DcW80_iv62#;0~*b3D4IOJj}CwD+vYeMg3d-UU+6^CjEjO}yjB}DxNGxr zz8}7pH0k=TxzRP@U^Y}BI8R*r5E2<)-p-O}U}^{W{Ss`#lOx?a(4xrE#6s;Ovq1gy zF-+blWxzt36HEdO@w1WV$VRpVc{80c+x%Kz|SE{;;^FP@gcS* zoLAGWCDL@){*WvOKBmXqN92vd$9*V5}EqVXiu5@x`X-GO#&@N{S<6f z7M$gbjx_2qb5T3GytnYGDXU|WEJ;(4vCl6}77?99^g-p{iF|uhsBbaC~_N)~p%}YaY+-inu!0@x?~R>Mq5y zHW64fm3!7cLNV#FW0{9)s=6npShz;&QRwz+!`_WAHr{_42?eM`kk88SShBq}D;rSD z#G39q?J6FfTC@pb+|E6LNlG^pty60|x<&v{U2E1SXxJsr5&#J)E~z_j2huqp0xIp3 z)hBnhI7Nva7NVazjTh62s|MLh0SWGAl01vK1}|(G;ttx=+3E+o8S{^)5JU;{6*XMK2S*a#z1<3y~Ds^u2* zg*t#!#f5kBUHsgJ%LU?-2vg(Dt%VggRXw?xn5M4R(R=?ff;P}JM$!sv8Bn#tR;d@T z(df%b{yAH}pTh0Ck`P32arULf!DhC&7A0qs*LGW^;C5k@ps{YVQaXf-`7t`3%+g7TSr(j4X105H)gQ0_n5lc>tBSz;NeW3gL6 zzv&1+ay9AVKR7zaqI4(G^6LHv4{AegyFI*V=D{bRNy+!R6y zIykXaz@>CJfaL$y`M4o^`ri8i09j9&3aB^(Sr6d_Q2x6ZHKTvy^Z%q2-_-)(`G8k6 z{)Ey!MpJnKAfM8Nt{rm*w5kHI`JFt;`h8DWSm2Ve8Srr47Rh*=f#f$X0xPD1eL8l+ zmpe`z{Q3+eF~Y%*pl4(%o)`jH{Gx=y-suhUze^FKU<@Wm{nE%SD!!{R2R`AY0GIxE zZDH~ck4xKAIex?|z7U}lKVI|Y~iKtCBAbFr*}!Z^*DFrw#q-M(#o)g 
zA_cNdbGz7VAWH{{W7Tb8uMQT2&MUUS3nI_x&~BzOA>sm0M1? zP50#Q=);p3=6_ZoYLl)J;id7^ZTvonjjxxEuce=^Ifv?4A?bu13NBBoP zzqfmBeC`=1t(#Keg@_gBoXcyKbWsXAO3r9`AOlR7Ko#n4obBMTsk7vFF1Ir=QZ*!Q z;zi8^Hti48MnTLq*TRIz&JeGR*MDZ)G-6|&6jkm%9a^Vgb6P@>&HetJ{)Hux)otg} zU0GS?Jba)INt5)%pt>)7R`a_0FXIKS(D?tEi z#cSqYqGs2{%7fE%!DsiXDt^yBlvO@)i{fvDuEVETG1xlKvkoCyLt{!k>($^hPTC&< z{}3ORUMsbV6K>#t61j}xfLKjC&a<69a_5N71712YiLBu1X`J(40|*pBEnm)Bcan>b z0P$zMJ#<=}&R_h=4BVr)NINsX`xTJm_iC@t>~<9e+;dOyxO!TdzL$OvOyc~rSEQ%4 zwDf-`_}?A;kH-D?{qCHohs^|;3;t<*@*_6{1)B|u2WaU{7Uq=nt~%9C(|%6@HjNA7d&8LY2)va1ilOY z@P$R?`Z0Q*q4TFHLt04+n?6RhPI#cqmz0^Kp=JfC{ssb${bH^<5P+f%kP5l|4HAmM zKue2PkqlDyl9{iX3bW-Rmh5}<%jbE#`~dP}+b0Uv&e9gih4&GYb*^;>2fEnagKw8t z8WmA?V--J{vA zz-c~#bk()zfQuJ&m2x&2YLV?#1B68FARFYD7O2CJy65(4T8YZrx6hMOO6YVY*4lE@ z>n8s;<5wo|nP7B=Xyx2TEIKsOB@FmCY;Im_2%DIAYIIk!z5Z}*x{1#Vf6r>X3Y>#& zIn7xkua|z_pAN7yI*5k~vTf{uZY;|tvZ1%}dMzc!wzgvxx&U0pK3?S@JWQh)4IpM6 z5N!i)7RB3c#R>La+p7{~@22mM)|-XBhWdUpa9w1HmGbl5UB*?ADpT(p}A=!y2Qc6WTaPB-7UVq9Zb5>!cf?`@-yhFV!)-X;w8L;)|dg1zE`hN zXUQx+YE_jwx{pB~aREI>Vu6lc&lM9m-$*q%%OEf4SFf>-R(O;z1I&a`VKj|OB8?Ai zr|4_K^)21+8)Uef*q6SEjqMc4J}3R4GF?W%q1HmZXbP8S!0>=*K!rFhd#rczadVE@ zc1Xucg-TQ1|F+eO)GkspdZ^;ALBE_Nlws38`^nyaC_h3gqJ8kAHE6cpUD&$@m7b`GTKz>GRX8vemP=Bn7Zpni@cKGoxw1xW~o& zKbr$BIR!knDv^fCZ|xC4>47`#C;WQ&myJ&To{luQ*MMQB55LndwVDZ%9U?BEty1NM z!gN1f&W|_f_%3uwLvv5SLx2Nns@&*kqqF!Zw@TP$jarMX1?VBd-|%cFKKZ4!b%Bdl zo&``t!C^O6|NDB`1oykqz8G!8)rKA$o$Mijti{1yZs?EL4!h+p08beoukHK#L(_h* zwwzf+*S?`9uE%R>_Xf;`Po6QmZU=Dzi;(KNk!XNfcvN6N>DAWLOp>FPpjNR9&u4Ha zEv!hD@RC@*v$^=4EIg>sYjYVRSk#Xj6JBvxdQ_+l?VEfbYC!_C-3@(Tl6mWtUl}OZ z=;#ko>2soUbV0%0IyT8rRCG79H84edLnJo%6_mqDdzc9*!2{I|+Xjyz^KLeoE{A1y zviZ>H?XvTS-xSwkGw19)=pLV$Yhh@hq+8F-gj86P2zYE96Zw9514k(od-_iMW>F8#5+j>O_sdIWG1Ey*Vmapq*HUm7tJZ6sTs1s7%%arQ@f}e*@eD$9-C-mCCM8 zxE7bb!tTc0a-Rjwj*c8S{P$p`rPpS40#LD`Bs|w6K0PP(%@(I!E(u8zwE3*MI88E2X-^!VZDK+Ru&haVhxOTQ(>abKs0v7rd zXsOv%t5#2`WkumaR|Yh4d=xh&$xl$CYKMhhQZihvZ5pfe+r~;U0%=ixs#(|#dp!S# z?gEp8B}+cKQM2MtKx}b*;a}1}+dMuXV6gTY7vW!u0g}cF 
z&^mH!0uRnj>u`D26SqAvAe-jpo9Ce;&p=8oIAyu#@TAD|NY$w<)6-U|}NLO_Lnc2%)07OYT`+u?bmSItD z{oC*+6%a%b*&?CRt#pGT(gV`n-2wv=Lx=?^-7s`WcMc4QLFdpzch1l?#B*`q`~E-s zR-WT{KfGVw=iQ$uj>{Z=oK{D@0Mpkig^_aBCm zckX--#w#^@ibceFSDpPMF?U^KxZ|pFSxCNKG$nO`T%C#I7 zJg_E_NZ6s}l#&86j?QK%U!oC~ya;s2U`WkE?Nv^13SOruj#e7iN`pNyuUwjOYM2K) zcGal6w7}*2E#isAH|)>XlXqQ7I+fCT?iUUi&wdtja*jmwCO?tJzu6!m(S5vjZj%!~ z9vK^JN#Xh1Bz%D2^*ed~LjdjXLS~o*SO*1PC-DRi%GRx95M@d4Whh4-xndeW|D zHl*!(iAi>47R3oWwLl0CTN7QY>q-%8og8lPboCg6JwC4>Sn{d%7HBVD?1a3S@_7jl zwq)39z&`XI$prUI@$c%U@Om0OfiM}55ekn z+*k$SD)zs=-h&s|(yYe&y-nwN5O9~lNrS~+l9RU(f6TQo}mqN(Y)5Td-lq__SeRipsjxA z+eTVyt{+77c%8YO`vTpEJbm0posYYto;TIG9P8)8K|v%n&rb?AtWYaCL9Np?yN;oa6H`s8 zU8i2L-Wy_vQ(PxeS_C)n4ZI=Bbu*`~N~H5DI;FvwJ(JrfAe#gN+kB^|#Q{-N@G>&J}=}ri*wR5fLlZBT?jma>nBB{%0>GDL@YBi`fY?p$4OG9k!izOVa`C9?)K{d9t z?b{f}X?S*TckxG1uJ=~D%;GTo44MYgAgdkl*Zh(Z5HBFpL}?pWu?cjUNa{*1r?dv6 z!s;-*+6Te+px|DA4CHIR6p6TOaTAKN#?X;y3K)2FTB}@zuQ4dZ@?vGkEJV9IOl}Hi z*mjAakKbr?D?M0zxBNjtUOrf_7v{Y2gafLv%;}ZtyF{dEvxQ!aOSvh0@P??~Gs)S- z)uu_9_({#}BG;axux5w>EO|C*Ef1^O_Y55kL)R(qRlU4;d?8!iJnyP|2)Z{k5(VPQ z`rR0x;u2~!vkD2Jv5ZEHJgTgnSsp(Iw`>rv zUp%IsUem5yT4P)Mc5^C*jkBpsZ>YxSMJ~c(!`m!NX+6pZ36{o@&@${dfX53Ot__Rv zTKjz=1~{->#IQk<9-$KPhTco! 
zJo63A*WM6uCWzA}op657U9lQxc}>k&pY28SnzhAAu80Yg>WZfM@55mkj4GyKEHCXi zLWym}b2R+USyor?sg5Z2vKa{T6;AOzNS8*RE^~O*z|HhFiDYg3*93VQ*-Nu>b350D z>=dyNI8>3J8sroT?r1X4AjgHDGQv(p=MKG%jR zPJ`YEylD=%@lCMV&AQ!CWb)$v`bDhY#o34=aUq*9C2L6r7g6_ObV>y0P=N-~J>Jfu z&!@?*j+<{`$|ajzX6kvS()|;r5wRuRZf(0&v8Qj{+gXro4Ko29m^6)fN-$&3PQ?<&T8teE2|_5Zlg2EMJA~u?n%$9JS4eBaFKnq7R?gwUvEP3 zTQ;-sP{rI;d-0XQLyvVRYRb#|Q^QV&UAgI^d`&3hj^p5#EFe3e&O`tUt!K^8#w==y z1PF;;*cI2Q0*k8nVo13B3;8DS4i_&2F4UVlFjQ9N=52{pcG>!4E+9uH6z=kn$-UpJ zYqH7Be*DHUf-YV!t{5U~Q%W;;6dC)4zwqBXCE0*?UHgY#^7(HD5qOXb`2*t^wwofq zH~6q{=Qt#rS*Paqu6k;C!5>%!`ktd#@&1rHVPR`--_8AnbNpZL6rc|*X0hWN=(hs{ zvh!~dexIFnp=cT{`j9*qZMKZ7uA4(_vnQkp>(q?Ce&c3_otf&y@tFz zF!cxmh$U1bqt&@l%(xw?xH$PU0~?EZ$I80@O6z zU+Dd)H*%0j$12$SqWz93ms zvm;i{{jk;cp@|2x~y7Id0={ zI<_~lJ07wv*qTU17x0spmjjX%R}M%rCu0kdwCyXu_?{Pl!+ng zIJOEi(YPT8q>7FqOci?)6RRCS8{-~?^r-4Q4 zHJab=RKPPXV>2QCYO(?{kZiNOQ_z=ptNvwPx;9sI7eB)NKQE4sg7&=I9G?I$+|68) zZ7v6kZtVFziv%wEJFVuQE~eD|z?>CO=fAHE3)cr5x6p^G z@V96JxL@?T{{9*nsaPjhfGSA02uMqJUX@l~T2+3w9$>xsMuUFpA z48TY*oZL(EyW&t|rT&4%MthCzw;&hTgnwS}pD%cYcl_sw{&NR^vul$7KQJ=4Z(d$Y<5}!pp?L?yu=(6;J~uw$-nt) z*t)RDaPzo&oIYPA_&r{gOq0askBy3qrbe=;nAK4whu_i!%C$W#7ThhYak=jeez)I} zv$spjk_QJjMnM4J32>$Z-dsQ!#XeRQBC1*D<#MH$jP?KN%p05e18~7Z?#3bn1lkM? zF?T+HU$t-b6Ue@nLbm?<4*}j%QPND1vXezkSjXOXM;mJT3|rIdi4A$@$Iznjm@2($ z`^j4c>zM{u_@4l5Li@3LM9{1?J5!Nr0(N!m;Vc0W(6T9DwXuNmC@N30B-Lz$i+RV> z#2>1?ApZRk55%!43xK8>EGyUQCjWpos6Dc*Q7X+)hXC>FS>|)fFXQE4Sf+l9eraff zyj+k&&he3G_TCi^_Krb7SC;`}jN1E+F(|cgzIJ-0I&&&z9FhF%X896sGw)$T|H&Td zIChO^s%iiqOkFftL$J&OC+E1^RZmk5Fo3x$SS~q^*aPwS z5kF?VgT)3(xnq%e&$JriZTS!~E_em&%1jf3$~e8Ya)Gw+lH4$k29Z*8N6Kg@l=4Ss zBy@?X6SvGg$;8TiC86ls6}+JSl5Tjs8c{ti3alCfU0w>2gX+6cX_ZRvIE~2-Y;~^! 
zH!<5{6TlRVUk1+kTzO*YfW$$(=c@gCK$-(&F^|)~Tm}6irM=8zUgc2y{sI38cnkz6 z?+C62cO6{^vY0TQ#@{Rs5J|md0T!dkoPSk@^IZmrvV4+P@vgW`4-|pLnEW3=TH-+D zHaD-xb=5&PAtWs&Bn!5^>Tlb(9vG9;T6tZO(vV(P0~Qm1HJa>W7_e(cf(WAD@?-!k z8xJg|Q6z0Lje3IF+ozaN@^Zs9+-@E=3^t9$y# zkp6$f7e>w@J~zdo{Z2ZY0N!8<5k}HJK(7w<;+W7@t*cuY6DP{}Q_@smf0IIjbAs z32C2WP_PI(K1RW$cQCYmvIxe{Spa*$z!wuKoBoVCD!0CX2wv#&6CTU1Xn|?U`2vB3 zZJ8{9TmRSTy^`gQ>wr$ zZD}YAD24`N|ItT$mgq2&q)1K1(uZ%%!0Ms)%q{`hVhYwF#+_aNvCP4AQ@+#v-@6bh zuW@fDe#^J&P5MTnsZ*Z&fg~zS{QP_I&K0604^+BvZ{ICF=~*aD6mjw;U7qxqe3%+` zAqzL!fi`TPu(|Y%S>X+NZvTi2tS*q_soeC3yiVf@7mHSL1J&yptbv^F zc9$JFCV2p-f&|eM0RoQETue%_Iu(|k`5Jy5zzjMtH;{NKUb#A+?W!F#E8sBBp6VlP zc$drYk>B}oZo*VNJ;2r$!^qg~Gymdqme;;5T+JKcel#E3NnP-}z& z4l-2zlAMA~Nltz<9ZmNII#hU`b?X(wP_}YUj*|ge`f{F0X8_yhf^?{n@66N_3|irH ziscagjQW`(FsgC5-JY8g0RXCD>oN&;Lc3j@D6d+g{?aN)H-O1fOg_psv#%LzZKd&R zY2fUTTOG&-5U`l`kIRP9;GYArz;?;|nUQMqPLF>J6J0{;H+q3rUwx~(-BIm*sE`HP z9Opi-=(U(?z}q=HoL11)wXhjnk=@mYS{VuqK^)%c_NSpJjC@!C+9&zp5+kP^ZWPxC zDwon^UoV-LR=;($e!P4jI7gOSaC$iX`Kg%3^;cTrN}*?6Jj%jYkGv&=ZZpt7)@mr0 zc)a!W-thoibt3xJU_psOHznV~JBs%TAxkpH2k&6z6S$vyL@rD(wGG2Ri0*BfxpWU{ zbwEA#1ipq3hJ(eA_jvmC^#d$+qVrHWOz%ZmDGZ%cVCOwB^arZX03!qcp_j6QpL_~d zfrek9%R2I>NJ6tr?|tviH9ory#=0(g))8*cojVCfKj=L$w}>g<-q04iXptqWMJ^4) zHU>g=sY`?37kOh_&aRg<5!7qKE;z{ksFcRhlsKFQ(pM(A@)V)<)DEZ`Yuc&Wd}q|`cY|AhCnWS&ad8=l)8B3|+` zkqdl@3)>Hs@$ks7Mf%bdM|gT;4yParp-y1^8I=AA0L2N@57am?Ze*JR=!@7(nhxYu zOpjR9a^Kz195dTAoA#{%G!~jVHqS8fKwTDEmG_k2sk9?SN|ZL!L@#_-Y`u{4DON!O z;|%$qrHTBT1!;~w8{@f)?I;TzlWb& z#=40XRVbe9xJI_=-TAdY9Q;nwKJ#H6DIYapIZzrGNoF)+8Hxicdb7L5Oz-)ZW$ zHixx=%+U?VPl+KVHfnE0g*-p)#<5BF#ai=HVq+OK4v}07>GcMF0|Tt@CQ?U! 
zs(60i5oS!pWSZPx^KsKPGPD$)19b*5QPJ*Efwn)ruGsWDUWE5s)+?b0M!&q2f{Cm= zsWFZz^3l5xfs{x?*T&a##52A!JN4W3-_i&e$-1y|-1JVWVeR~noBWT%yyF@07O#+3 z`?2*5t$lab80NXS}Q$R%dxPS8` z6VdsIgYvbv4YO;K{-N;-PBLkXqc=K-A6gBDzSo>4v&# zK@4w+PK|5t!4M)ZJh7tVZZ}``L{zUrjmP?u#LwYdv1&rEwyHcaqxW@8!;7#$p zqUFkrpyp|aQ=wB0Bh4Wr`AEN#ve@cPK^qnOo(a#MR{)*nKEvmc&y+s(`>{!YoU(LB z(u6_c<`hC3r!zz5$#ps3jPmgvsYT4^abx0eoZNKJ{8=H?r~|>(Tf2f^R-2 zK9(r$N@_xL#sZu*)U|r`ZbEd{NgjdY@S~RNOs&zNS3{C(A7`Z+HIf&D7-cOKX4cD( zUv`@}i+Ko_kQHTaP7Ewt_HRz@tRD3?7O?+HBA}9mcRTi*{1C4MB1X$S?qCy{kk*Ic zcHgK?i%>K)fSL}~b&hc_Z3<)=HrDqP18iywMRV+SdI11Aj0Rjuq3I&kNh~);;#_-C zfw_1ez*Rp?+!HQcfi5?+x9*3&OJuXlt1B=uu4}+#2P7?oik!5wY>w9=6=GQzaSM6d zeF{4?vEs72zg^~I%}3r|aT@dvMl zx!}$>DD$yurS&vlAh@A8+)6OQ@Rm&H#V1hA39DmLX=d0Eq^j|ViOQ+>wFI+ z5$;oTT{C+@weOz8zuK179+lexpsSIrs7%J|e#SG#p5GJ=5apKLwjQYleb47S$%)xl zuhT+FV)b-td?9_PUM5^+Bw@Zef1v@aXRN^zomm5Tz^cZA9+#p-J_udS8`kf}Qc!=9 z*&k^kR7s!SJA{5-c9mfvqY|C|kc)0(rb<2UJr_ z^1;`a_g6&vDpbQSuBBLN|C-yRsa&Twe;YGJuvx$EeewtIhdv!E@V)~=R6WsIH&+6W zQ8Tq^M#QRKv&wd;>j@@j!UlUA@Rh?OAQ0Ou?q_UtM~;Jm>wb*ON;=rZc6icvAwov5 zbIM28JvWq-uxb*;^(kzON;*REtYY>QCX7>!RYhp2riHH}J_N{#vS>Be;RVy9nZPPcAH zKSx;HL9Tn`C0MunD}b_Y#jLG$@brd~Ds?!9R1285sr6VhP?u#%?5_>&;uXk8Z%G(X zj(&7=i`o>ZX_X=o60|-no$ZBr*y`6hM@+aQsh@(e$=+Pk7==BgtQM+*A`epi#^M%4 zOASZ-MJ^zwsKGC4IE~q6AJiHPggBLf;RHsdlsQyyS6B4co8oiD$ahi7;{BW*HcP08 zsSJ-VH*UnQy86CYu1FryL%LHhB)f7wF3Fvp`lkIWk~B@0=<`l0TyXM?vf1!s3}mq^alSQ! 
zp1ALINMq3%J#(-0m1I}|h}^gYN=+%afCrTikk|Z}G64lz>e6`c zJzOqR%?|GQ!qv#H$Gg%uTiN3>6cg83Y@oA)f|*UuJ3yOfeqdc2EQd|BDsVPF12R6= zT=2$&R%(w=FfmG^5BHXvFCD{kKikCh>XCceOcLvORow+#V!h1+HBNp)`-2AnQJ{%S z`wxl}_w`ApFzY_az0m0Nj#b9A=vYmb!QyZvXVV;|&|z{#HZZfs?J&NV*013eF78pC z?Ant6|BkM2>`YjeZ-PS<%Mae~z&tk_F(?e60cN@k2wlS#ZeM^$4_^OfHkb+N*^RRa ziknW>uszo|J>E?0N!6Y`ps7UI*IvM}P?n9I)1_9akwjc5lq2F@1XttS=@Gw|rsH)W zc%{ksTJ&mOd2k~e-4YMS@8 zT<>IpURms{(EAe{MDyoS8jPkk&9kQ7O`Oh|=QrHN9UZI-stQ4)pJ@SyAHX$y-nsym zJZiRAoTErD)m%U*j49!zrn2~Uypx_I8~W2dhaBWkcTuoY>e7VQ?D4D%FgXE1y6Ucu zrZ~{9+?BEo;Ndee*W2SU*iuGktoPJBty#0U#jm z3Anw9PDoe&sKn`n=~X^2wH_V!l@c4yGeCYa-Mz-+AVw`szD5!X?G>EVB=8c=HbAC1 z2sx~b&7UN@3n7#E93I^iE#*40Wjln|#p!!Wueer&H!|@Hru@(j{mmR#MPq&b8mj|& z+&e#Rac}P`XYJmDKF3i_4-TDK9XY(4u)EonCPFZNl86f#hy!G$>^t&SsIOV|CO%|J zAx&0e-HKPuJ`L>M3-zU3@p9h6$-v51k;WnAfHO?WJ8*=D?0r{ev}$1Gf4)f08B0vDaBG0fWk?W~yL6HQ`xQhl89~PEhE?#0sf~c* zK9-WTVbS<`c^f^iL~fEqP+h2XqQGGi6^@c8USTltZHiyvI+9elE~z)~ejrOD!X*s! zUn;i+KU?Wn^YRUoIB*XAn&0Alo{aOW&;KF!AYG_1n2dfzwGt1=Mm0kz(md-o2H{j} zWxvbXWiSG59@}VM^=LdsOsV-~9VJW(cF%b)H2=$;$x)3oOt?c3?Wf`S6FgNiWTr zPdEc|pXb>y=kBqjdDywszaT>&^Cg2K8KBdLCzvKT+sJ4ZC_RHb{nDWy!ER?$-}As~ zbX(--B<_r0C9}W(4k>j&rMyM*&By&V9B9qGkoEr=u%M0G5tp&O+>0MNk~lBOJF?~9 z?c`+`nSZai6YL*z{_q5o>VT5XAB?7#198TjMO{u)Z2MrZ{7s81poKXN-Otl){{eu; z_4I~9b$V>pt^wog9Y0RRP7bJH3#p%1QL?Pmt1*$uXF?Bydau9RX80!b&|mTiY2Wmj zFha@iaJ{hRa`-|ko34a4iHpPSyGDb23y!8p*N+Rkb5a`?fV?A1gMAY>e-2Ml;_vSk zUL$#lpQ{y;a^LJo3D(MX(IJy$jX4asH8|Sv<5T{4{iIMied?`8qzk0L!FnK!ec*9{ zYHY%=X)o%DbxW2dO%!&^I$&F65O(cx|*8oDI5@6P#ktt<} zucLlVJ1=`3Vp1YjebWuz_g-&UxjdwAiAJZ-?s@wiYJu3;EIR31PaAH#$4NFCJC){} zzEgjr@x-rw^>1kDfHpkd?P9po2v<_o3g&q7rDD)}7ko4cp&6**^Wx)a-JI;@l68S4 zH=P>}XCSKhu6Y(JTuyKd@IdVlHvT6cKK_@Xl9rORda;6e&9Uk|Jt8yTRgHeHFGvu(p0j4=+3SHU^)|D87g#4!LWYr zVB7qcNMi<9n#U$W#M`b6jV2lhA=YH@9_Nttrux?&lR~i0CR3x6spZR?YBHx4+(Q6Z zX*T+7iGFM%H1@w=QLTB3;jx+bSJ#Bm=csnMvbtU4{yR&BfVl~1VG55-UPz8UYospV zQI!2|4I|e6vBFt?_x!RX=c!5QSK*8uFJUD?ypHj6s^jB;&F__Ydid3#j>6IkY>TgUvn@l0C*5Q(hyz0_8r|0x0fZ!3C70Wi8hc5D_i 
zel2$Z+nK>8*_^*3?vX6?t7E(VK!qIuWRiF0#)Lfk+ll^bvBAZFf5^a0yjuGERsDD0 zPO=QJx*F`Mnw*6HdA0wo`S<_qCBMwEI(NNtkoIH#_3?MC0+PHJXi%ydy3KkWrT1478Q+Uc&%p+eICd`vlnYG-e`mi_O?g5pIhQ z$L(F?46wrumcGSk_Lu*%@3yw((fkr}a-T3Pq}Jjj=e>A^;(Sl@fy9FqV0IyBH^IPyRi78F;rnBM>tIRp;#SAc48Hjx(XnHsxj34PUz|KMr2A@psKh&s z*J+>`x4dKFHI>Mv9lCfU>Gcx?*hI1XxO}V<>I9vh-eCu1w&^==)oMj~bxc}WANECp z|Bw-*Y zfpF?w&1sUjt-1_1l(Ub1pk}|9)5Y?dQs~otLCA}bgo2-e>z7VCYWYIbA$X@P zzk#z*^+&lXN2aAtwQJQ@F+?2L-z*r;fzSApzUZx=k4$9d@eNjfyRbR~b zg$`tYXr#x)sX;t4O~dz(d3_H=PB*v56!!6U>teZsUOidXhTBPA`Ng@$ybohPI@Did zPs!%r++M&+PtD$_rz)Q6x+mr__Or9J%ME1D6R7^cEtz{6?Ap9mGkqTJxdd74ml22q zDi_Q&$$LQTbbO~TS!135InzpNYB67p#tAP#1s* zXkmGOd3cC^LyG1;`^_7?YB&-}%wk>#5&cdL-}fZ7zHG5uhvy%1)O0poIPO6QpR`so z-^F5^mM;6xXO`BKl+pb0NIUxhQ#Uenj7C&RpAha|(zfh~m z)RAD7%EqAZ-j70ZQ$#!{^bIFQqAYzBzmWGZdTLwQRddoyq2!BLf^ha$WM}cM#*j2d zi;?e}LF^-j9#IGg&|~P3x;d59rY84#dinw_BcMH0XhQ}tbHG8Q$`^mcJ^se`Kg6k$ zCZzFA);fbOnv3pYi4=D2H`x@q55Bn;{1gxKKjg>^t)|?b2tiHpJAho`c6H;$&+K?a z<{#z)B?KGpb3~aZbqhd={88lrFuF1-bAEQ{RkyYiV$t6M!LpgGHvQ2Hl>Pg;9oFbQ zyFR5Ta!^lw$vfSnK9E!GCugq4Du^@m?XHtmDlJw!$$6VSC`fDR%ql!ASXMI3Pr!8tL zEs(@*oyu8~14xGHqA0)#0HMWebqTdvXer=<@b32IdCuOU)&aYZAQld%eZl#0TtIWgK9LsIaB6_4Ofr~eQ)M=yHR##K}+L( z<8VdJ`<2~*6<^y+hfo_f8FE&Qk#k`=xxvm&!1F^KXaD19O^CXT_RL1XnHn!K_O2~2 z`)!Ncsu93%_@u&obtcO#LVRdju>fm%gey*Y7;wKZU#BA0NAz4FPqXp8YeonKM>HoC z^eQ%2@%-6c-x|q8>LJmG6x%%_X3%@j08K=YI)+xlHv{3A3!=^#cVxfM7K>G|YMGxB zs|^}KRJX=c3}Ex{J)XiUkhsw0K*L3P4(sEA+Bwgh-P;O|v59**aXew-u#Tf%xbsq< zR*|kWCv5LoKW=0N97+f9WVO-*fXAvQQlsEJ8jz-PkCX+p zj%0-0teL>Jv~vY^TD+;l(JV$n1!9i1^6r~}+>S2iB~L@-#hFurdV%Ip!r^pw=W2#f zjSJiO@I}`h39%7CIYkbr*k%D&ZD5G1;9Gv9#nH!zjJgMva1O4fo%gFwepXel=IOG} zSR6Ju4`0fdDB@Z3uozKG!^l9-Sg|%4x8eFil?&U3LEm9HBBwW8qRU&2 zx+>j+!G54Nz1D zpvlwo3bR&{{`KRppB`hti%vb>Kp=E8il=AgPd9PbrLHI^rBq#@jlm%1r5|NRlLiR= z)uWM7UCXg4V#VFNgDntFxIn52W76TF?%vty4~kSYyZ1%^sp;;rACtscByg zas}#gw$v(W<5douKGmgXZ@o@2XyT>)N^M_mPKo=p9DH3?S#)964}hXb%-~?O%7;4S 
zpjpdXxuxA9G>CIGe~EKbiBiDjg!r^!mF7fP4wIb}CEGx@pEaevkO>$Cj{`pad7iaVV?+)#rT;1srF|qQG=x1Y$>%a25e0txE?dD?~$C!yxElJ^rZXQ zzSc1?SU8`Cg@fPjM&`2I$kR%nY@`U+-Q??d>fp+3~{rx z%&GAj?erRH7C|*=`5u`hp#Ow~olWf0Euvs2A^~hLwdrr(C1q8|E=?~p={6k?Up5U= z^KW?SSdEXk$iw(g!mS7n>$EGagS=@@#+V)(XQSKEaE!0c@w(GP%wR92$5g-a9?^k{ zw1H!No>QMyGJt*&8QOP^$NzJ@Qb=fU*3(hX82{m36T~0A`;LYetb365N29`GX4nmP zxb;nYd!YX6f`$ZIY)>V*;GsrFTy`@P+s(p=NfNlF1D(AZ@UP*7z;k$0k>Rbhk#veG zoa&yt-5$@+utUx+(o(z8wV5UA`O%o>cg^#eRp(~|44@QN2XyJl2ZhX^N^cUyi`1+H zjQ6)?XxrwMMd`9T=^cgltx}cTddkwCvN@H?-`97*4w24|W@lweOx>NZMJ?oJTNaI; zy(~69e?Xs~m8}sS)%}tBtV~*lddwjgEIQ6xM}fnX=9Aah63^!9jAcK-G6%C=>?yDxkpn*X)NPu!Eh@Y z!#Dx7c!a0w58_qAW)G;>w5z4H(w+xFXutCM@4h`&q589saAP|=&L}2bszSy97ge!t zCUMk9QCN$4ZEEhT~u3i;y zkC_jXSe#QR?s}qaAp$vycxZX>ogKC&zP20&KSSLh)T|@#dV}wc;!f4biElRa4GBFU zBrn{l-renGTPR0qiBzaGAoq)7v<$&gp zch$8`V9_XJ=x z{fNk%F!H@Nn9p)k%&loqqA(>d?o37l-QuyRA(njpGxSV_vyuMw$D#_vY39>dBCZw7 z_eX}CBzi;rQZ@TPcG+k!d&=$Ow;|)QNFFyT_54HHxN!ph7aGqa-{~(Ihf^1_G_*UF zyE3RT6Rr;Gn&~+z5$l=Bp7|uF#$}S!jggX?L9xneWVONT!^T$`SOOfBGhS=$GCqih zR`E9Bl=cW0S2WUm?>^VP!1^Aq8Hp1Gy+9j*M3c!n2r_4%mo%I1o4tyXOv)-X4R~6h z!cs)Z$oag^Wmjn=1ayIrcmzmUFDD10sGNLX?yn+vw(e;EMFbM?h`w~_(-)H5m5+Jq zJl(Rnr#5ADMe?!)9!ZWDt8p48pNj|TXAg?NED=O|wX>AF=Lf3&{@rBC;`6*&XdENz z{Wlebh*nkA$Vn1I2)NQ`Z~L`JBeKp)P9(~3u1{*c4G;%opzDgJS2{~yJS(G>FVJj_ zPc@r7nZkn@H8KY0;@uoE%8izPTAuT#skl!eb}7ssZ@tLAleio23KQ+Th^c+ zt-=^SL-EMi@h3!fnp`NKK;F4E>36*YfjS@TkSTh3rv>xQAuR(= zEu+cF;>5$T51j~Q2}^k81{%NU=VuIktA7m57c>{wC)N7Mw!TbC;zr&2 zAE@OrzlLzZ<2Md+g+h71V8BG|VUDg2(qt>_;+=+dv|l>K<_N*T7)yL&rvi>xwdmPkCfpdgZTbE?Gg-7K`cz>uEh+`;MMbb1mgbw0k zdxTQ@^D-xr=0zJk9GGdD)7@<2z1{OY8rqoJnEJbJLe(y z@K7b%_=Rq`k&M9zAH_0Ci|qD_suj_h40Qmb%hHM%66$YUGg$8QBiA0ov>wEfvq8W3 z8B-fb98MI&flr*-^yj`F;~D&?Nc2nMx-dp#jr1a*Dr=_wX)a_ZVM&8}z zLZQj^D=oX`8OLV%(|`Dgco(}Cd-ViJ%jCTSN(2%c=`U=Cc;q%5`b`EC*I7%gGraYN z6Fpb|!#f2Omi21h0cE^`N-UQ9mE~F~)5bA0HM7&*2U`G&A#|s===1V+#IY|8 zevfz$)CLqkqBwv;Cko$fgC8>3OGJ66Z;wuw7p*P`U`X(l5`?>jUR?C+u<~m!I`R?$ zK=_n6#5)M^<+pUKF4UEQ_0xVd6Yt%ohqyCN%0 
z4Dza3w-PxcU>El46rZjy@w*TgSJS(=&BL=?R5Sg`5O#86P-$I;_%KNuCZkmCq~M}j zZg9g(Q4aK~W^=R*{ilZ+6~)OO{3S_&ma-dRajf$j&Ga`K0Yb}juL9PaYD28ZHn z_XEw8dN_%-ZFI`!w066nI~(sb2a;^j#Ln-$6vE|;>h81A%U35L`M3%%PoejNxo=hv z{fA>W;G(pdrX6lK=@*}-t$!&vx?y{Pk7FbngcVn2zs$C_h?IdsaiE-`$7{m9HKh8@V)B$U8Uei{tVj7L}= zybuk^p(>MG*oU0ogZ2X2^F8iUhwBDME-=`1i*(3@8ixhkyl1s;!1xnKqJ&^_hMGiU zD|p3OzU|u{%D-mX*RBoXo=JTxr{vzI5m{M@VPnt4u~P&P4c1SMMEi;l+W7O|I>Riz z*L3ZQJFrTf)JRNV38Gvn-yHYdn0M)Q-HFJpKmH7{`}*b` zhzEcTk$zqxRJu>;nc}!Kp&b8M4$jjw+R!Ar>l$g{Qa5R?x$gZKwXw<3HUk0l1WH zX@Z>P%j*oB+z6dOne(!2|L)Jwzew|`<7SK-gx{?6gi0H~0qadKRT4IMr%lqXy=v+p zmvT5)cEB)y=;yinkdJSO^XFo}h?gObMB+E*qjS;T>J6H*4t*I1k`)04-~M|HiE;e7#(-F5s^6*6+{mZL zH!DbZ`+A+p7=32(Q|)Oz#SZv{LcJ4UwBs2@8DC{8AqhskL%peMs3-d+gQoy7=^VOW*{ zGbqgKz6OAs57H=eSofKmQ{_!_Jth}u^Y^|c(%#$H6u(ai7EV>7q$#k2H&1ZBJ}PB` ziV%5W`iv4Yr>W>BOKPW1Z}}LwqkAr_GmBD0h&1!HzaoA5ToRYv3#Mi>pOTOLyNd@D zzuvN_r2A~~7AKPs-_Ts;l{_nB*g^%kdxOGQSo%+}IdW+TFWi+JB9LKFL-G8a4trS4agX?{uVqQw+7nenH*CD0pg3HlX(I{=_j%7`gCy z#3&LWR#(sf<A2`J+P=d11bpFO`%n*aV=8wH#2K2)f>A4*+WJD)oNc;4AdV4fD7xvLY7? 
zoh<7+r|_VTLaqRdq#C&Q3F;>2cgT zP&&&3)LoV+x+#wr5Cj81=z~Kn`}wOJx{b0SQh^L+tcxw|)G z&+^q@q3#OW?C>sY7L_$D#mdk!oMcYRv+JBl=#(#b_j}H}_rRv`q%IyGY`jte`VaX` z0aL(Gn9xX!8qO3Ie)^8qq8khIUgQ0$}z_6MX&ELs@ckLNoqvl&ZMv;8Fn5CI7n#M*&;TT&yKV z70Ah6B)aPpRU=V7em*dyW?LXwAqvP~l+@A9SzGDN?Ax?##mWeHtS?uFM<&kQ&x=tL zYfQ?lS#hgVp<8JHH*}sz`=;uhdi-fWx^>+KxnyoNo~>(_cJTEYtt(0tB{mEz6l!=+ z?EUPre3(PZ8Jlf6I4#PfG<4rO`SGjBthK@KjdK5sy)Tc4a_!?ji4>u-C0i#4*+v_( zQ%OknZLEdF46^TpB!p7PE<3|mXE0+g*+SOA%#ehbVQgbCmiKm^r?Yg<`~UlXKF@#m zxW`=gb=}wUTfX1#4=+||!x&@W!>hM(VEIN%LzFw_X=e zm2|O7QU1HFxM|WI){Wyv!P|oJodwj5J=7e0EeS~HT=ALXTZ9hUbR3ed^12N0qUSz$ zyPmi!l4H<)E#r>OfF$a@$j-R#Q0(`3j=d`Zbs9-<5&N3&oU{IuKpA0gcz@d6ri7HJ z)4lVemoC*o$Jcm(jq}k3F-US|&Txs*`xKWgDhY;jj6(%=&agyJA}&?z?)gl%x+~tM z5B|=}wq(i1DG3&3QQWQLuG?Ab>2f!3o{ZBXFH&igqrF)AE{!GSD-bHGM} zStj@Di=p9Pu7VfFPYd9=t(_<8^p4@GU1^697_#`IYH7DVB&f^0{1AO&HFJHg#yi>Wf z&Z6Vq;Jq~Q^rW#P-nAL;BX43E|8T(C9f-DH%l)dv;$%D@Ovt#ED&_o@IE2>oCEYpg zsq&nugD;>4O)jgQ#0GFO`MgGFN3l4&`|Sw?j@>8Yg2eyG0KH-uF6Em{w$TddWIj%N z3xTuODP<*_2gVNqlq~-4{F}k_o*(3tqea)Gc*&p zzn$!&ND2!!9>J`OV`$7Id%G2U$-i-@#8gkb=;DK$x1dih za=e_(ThgCg=f8e`(7SrvZv5%gcHnt~{!f7p3B3xk^C`kvzQfFlzSe2iR_AS+z*b9A zkEc4TW}Q`zh|m?VS!}4i-{vSCtekab6ywE>m{>nh=kwA(C1R4jum}SUe5>iP^kHFI z)c>aMl^mJ#B$`HX2A8X9ps|gX^^iB zd{)GIfZfF60m{Yb;itEb7z+y5r8rEKn7rm_UB(zLKvjYYNt!u)DS`$WDXMM|DlL6OS3150Jd}56~@T*^$*qg4_f%mdY!v# z%JvOCJ1Q@*j&vGn+u2;=zoDF117Vl!doI2kZT>!;-G2|UKfmg@6QPlSDO9CdO`KY;rvOjS!0yD-wqT~i%H+h}pJp9Nq=8j_EBCvKn z6n$KnNBQ?R(b9A4lK8zFEHLZl%-92$8+pq%M<`R39efM)F+sf79=WDncdTmsx=b*f zJg3yA#JwKj<)f;tSh=f}tR-r(IqpE}G~h@iiQ?>6pR($(gxLekK7?7d19l$H$N!xX zGaWe+ah0b+-=@5$ryNbKa1IYN&Q+!=l=Mt5qbcjKW97+~-tV;2l<=N=qSs-vPgXYP)tRrDR>sMi39Tl*fisrkpvg0_{J`Ef(1d(QlU>Lp5ayO4LqyH&Vmh ztyb>4?{Lkd_se|>d3Qw@kgp|_ZCaL9TQt|%hgbcIHos4me`szfLJVMRn@=b`QaZa` zxnK>|OIAS4b>AKjX$}RMZq^4?Zr)r9rRUeGOMx$oaCGpz2J3f^F$`n&CIeimbK?~S z-;xJ=%2W5-Irj8WtIuFs(6i<&Ge3ZMDyAwr48$sDZf^PD2fps3&k$t^3K^Fm@>QWm zr8dL&Bl*}I6La62sN6c>5I23)9iEpndfmpO|GA)(=rsBYlwv8Cs$v(7Q& 
z%JOdg;t?)9S1hca1Gke%bzJ}lGu=a3@*XH1rN8CHZ%C+E#$&v~@1oojxOUtEwy>mI zUXmvzClEc}1z&nHeDK_vJX}o^g67KGZ>le}z6L0tzm;muf2-1xd{H`2&jvOBl7M2V zr-a%hKqq}Y>auvrCC(A%vTD6|uUy&Mt%i<%GJBQeSfY%R>n->Gb~huO-J&x zN!5EVfnD~}mJBJLYW!yno=8Y{79?)ac8P)FYhz0;#h#YBtE+`J)ar1SAJ=+6$$;{{ zw{7qKxF6#L8+F7}9kFkSCk#JXjm#E4q$}g|U*Ur^8o>~FJH4RBL`cqdz~@@Yf7rc{=d}UU zYmmUyvvk~yF3B-m--ujd=44YKznNiG@-~vd-cy)9%k4kKyIdSW$h*w3`P|3dkh2n;;34R_B z)N~!Q(Qdhr7Es{)$ei0r@51`sq@1Q0!PLU@VVR@(q=D(RG{PD?$fx^y)_#-cvEkLpw_Q*wipyOJSQ78NNs9d1~!d{?2wt z8hLk(4SkbE@zNtl_5ji>K`$B5$KX1rVY*ycxK@(hc(8*>ghzI`xpfqhy~@2KwAg~@ zQVKs<(g#sDk&%37&(wS!s+!ITGSX@qbq1>m!L;n^HXc*d0LIA^#{*XO5w}W0PSGAC zH`P9|7FyI(TywBb-O*j!jvS_W~R@7Z;*PV7$1K_)Lg=Rr;uG$I_apfa08 zt~s#*VFPB2ReShlt5lmBVD!J=xMK62%(Q+f{YeTXCenaI1LwNF7(=! zHt~8BS%=B(Z(ni2UPu4e6UrviF)%PJHA^--&_#$Tpb>6;ekk@b5;T(#!gD=+7*? zGhjM*?f5d|hJc5tTK%hg$}28rU&nE zQStejEkK}BxPQK&NY0@@leN@ZR~O)rWx#H=rspi)k?x;j?9Gsu>8u^eQCoi#uZMXV zmJ<;X0V*21=(r*=j!ZlsQ~+7XhajyQ{abegPh1mqNYXDWl1dGG=05%?H}-27v(iRlhgEKBnrjCex}&W~}4?4#7{`&qNK zBxrN2OIkGC*g}qj#(4k5+BuH8Z-X%B;Ub-I4X4J9Hpc#2YoLf^+q0pA45g-(-4A;$ z4Rc&V^YI0vCO$3N+#@1G&v;YDNOwo{IF7(RAz$|nze-kBToft_S>TvbtZ106(WLmA zvit%1&W6&l>FSFW$=rT{lm+=FM&-CXIZCD6t(Uqjgw)_Y?t5f5$4E>-@Fb;Gv{#;3 zar3yb-)~k|LAv$sYB2M{XA-7$zs-1Y8ofhuVaYW`vneL-p-j>?M(!nBQpdsVX_x*4 z-kqgOLVWJ-?||JW9nqw0_yqdp;vX14rXzDfQTs z)c?C9kF7QD$t23FI?lMW7IXafh5hIDum%aN%y!&Uaoq1!Y}F z@*jsTCS(yDgZzCBfP8EzU|=9S`1Zs{ep|Y9;pK@V2>+6~wJixtV;f+$JG|5SWIYk! 
zNmsmat;K1%y4JY2#PjH<4^f(xaZJ(+{?SU}&s6HqoL}&-m*XX#{4Z7D`jG^<2iKuj z!kzhx21cgAqwlnUZe9zg?QIU(tdt)|{SQXMxA0$NOV_5w-B12}2S8}^0R=C*b6Dw!ShM*vq!t)Q#`tkEGSq_O||9>=CVRh6PW?a@s ze>3hcl2oBg`Yj<@<))Q4isPrIxVGf9040pyJ8!3UeofZ$-A2})+R_*Q6Q)MR`XXb6 zZR}z(6Ebsu)o)dirEY%@7Z@q;2Mu3&o7O0oi#(Nn*H% zg=t;1$KPry$%=ky?l&o`nxh6eGUuiq=%ocb9lTTp0{-`|h$!{z&ss>kFUFQ&w!ios}IcI=ow}C;KfndPv zjZHNHu(6L9Qfw(T#waCV zwb8%k74YHIUnw(hAfajpY;}2P$Y8)Q95ELVB;;Kej6YYf&nZ@~-bpL$b@%{W?4KQ) zm9ogPmd#eNFGe$ps>mD_gMCn3g?EKcOfuV8n_7=E*$G82B&TurFS_j63dg@wcfuli zpsNW|%Gar+^^^%2MAbx5aBntB5I72>6a5c|LI1>7|GpqZnF_Q}`M^8tR1q&|Z$?4W zZOotcYskkwgu~>Hm$L+WX-+`fopwZgjfC1rY4nd3`uh{InKn8lmWk_50u5c^Z!}>< z+w{wE;o4G5oJNbj0%n0vzgTKMW zE5?ugWlZZYWMgUZ*%x&Y{v(?Fg;gb>{^8gTS6j4ewVpAH{X!Kx#5G~mQ?c}WFcEHy zh&C$I7ddW~*j`RGQ&bT>MALM?Yw1pel~z1XWUo^hSqr;ij@VT7i|#M7HzGUPQ)X1_ zkDD&=Xs0#f;DPsZV!c|${LlY)$Bqp6*h~i+_-& z%|t+N^q>3dpI3eW8%)$yM}q!ewEMpu%KGCJT!~VDc}4&I*O#@2JMHGMyUm}!_2Wv# z1>mN2_pX1m`WI5${~G+?kp6Gd{B{5RecAkPS^U4WEE3l@JFj2*!zTaJ1&E|&U@)1e zvX3})AlTX96-F?VWG9lCagmJ?%n-RaQesM8G{Qa3*B>!S5;HMf(lm2<>HJsg?8lqG z#V8JLz|BdR@+e*ow`H&zDK{T-!jvSnf~!r~kHCoI`d9zC>*|)al-3`N4cgPSgOa1@ zoQ%tzVx>zmqDrd3^s=Q@-CDCh_TV4Q0G5yY5k#pd%qDU3K@{T)$F$i-fH+KYc)s@+ zW8*($-_Qm&-M$j2_tCr0kT%n2k_904RpYY{Sp_Wh z7C9Eb04Of)&nA5U)mdYys;!!Zp{2E+^0*h&(17Kp_lubFdG(mUy@kT8r4Y&8tf2MY z>r^skD$L$z>|L63t)(zwtTKG5!9WN0B_X#x3CKd=9`>cP--TP$C*XZZc%YPaY*Bj> z$kHjX4RMqNR17GYaZQ4lL_2DFV^RJtJmqR zdCbbZ2xY|R9?7T|k@QWwixBj|e&mJM$4-vDXIA`{ZLk5@LsN|%*+%sb702+gmNbgLr@WqiW+dMk12f1m z>h_#YX9nuV*AG6!$qz3#K<@BNY)rdi&6l&f>B9S~*ZUNEM2#(mvWc}*1rumeBY*8Z zQC5IL&B0#bPbjNp9VPcY_Yr7Kwj+za_3X+wnb4AKK2!UA6Hhv5X{1!4NvK|ci^YY& z0UD%E1|bImSDd>N1oe)HDgLqs;MOnEeQ&cL6O(+VKJ(0$5NmF;^c0M68Yg2cS-sMG zk|g8T-U0~6!6zf>;~qIl&VzYwdGM@|%(GIYikmF=L~Iflo2a&^xm2-Tn-w|7K|A^B z4+hTN<;}OX`3YfBJB3#~zlU#rYvyO7@zG509K_IUq$NHx{mksm+B>~ z8{{1Ek%!WxUYNG|{c~85#$LXqv;&$J4n}m}Q&^~Z3Y4OJ13*SP04dmk?nm%mD%Hud#D8`QAT_c; zb088kG2qL^kcomcmov0bKtqgHS9tpIo%Z6hB;6U9gv{^c%eH0t!;tr=B&5MkQHdX)TXNHY;AJDhc!IKOm{0e~oa7 
zewBFkPq~2qT%FerozD#Fc4=Pv*f|lv!ei&!^s4T(w%t1(zRvL{Bf?p6aSur2x#-5N zwh)0Kc-^kP{92K%cBp{$?cl9f#SpV{D|z|@Ae|(7o^4sKFi-^+|w z8P=wQN$<3ol$IR(`(juD7p`o^2;)1q)P8c+~(;|8l&Q*RRH0|;Dd%?Ci380IK zjVoWZUUe}oV&YFaJou8Ioa3Qm88~2rUfM)oFaj#US3EreGkMm?VRrkd6W3d zS)+p?0cPcE-@SokbpP$7kiA%$r94&)UCf26qs$lUcicL%pbQtb2L}c3m{e_bCu;F$ z89u&UHdfO=&GCR$_?~F;5iwU?93j2lV}@g4`OH+L76yc+J5sVRbP#vfc=8hBL(X5F z@x#OErFJM70l|97;GCRP^Z{`v>Ou#9Ree3#%4b2~EMegk7zsU%u&0Zb_O@Kup#UsX z--aULVYRaKkI%!Tra5F6Z}{#w7LM()DDa%%SP=0I*&`=ABgY@=nV;ogtw3#kWy}dP znvRH9SbJ=s`ypAAU2MCx4#O^HjIyxgygh59e`$81kUzc#7i})mdxF~)*GX48T2@NC zRc|5f$`KUjDL$VrKh~!>FFxe;tp^|u5$OH?su*(kTCzXUGAr&D6*(vVc3?CdEqf)Q z`UosIC;(tSdhZj3{B6 z@X1!BOdY5_Y+Oi^2a==$Gnxq;X-74wsiiej4xUSns1#A+F@Vbl_>9nrsh1oJ2)y#6F zVVSwEFCR3nB#e|>_a7$rvI5sM46QCX-|MI#k1pLX2PucMC~Q3*%#fv)UcGQcOu}=X zw|4AFEs)8k z%ubpP6=*vTckr{N2taqHLg-Zo-!m&uypeqI^EvekO$uk{!jr`7-grcEqvEa?=ObU` zJI~8FjH04ZjG~2xLfzc1P3%V6CKv?Km9tZ~sYKS}p*fUNw=zVVNENXdR(!m@7PgZ2 zf_ciFz`MD}qWrL?@3B3FxqgW4x_G%7ejDXHB{kh#pq=(%37I*#RSqeyvKHF2S-j^5 zXwtt;813M&)N;r)3XYfe4&nN_4DtD7T6d(Djd)cU!%kt|*kN*|_F1#D%v7McAq7sE z;^(3TJ-|qG)g{CA{c*bW3LW|X&0kl$;x z*kS;s0xY!iB{c{9BqLy%Zdot+eJL>OD`N_;!G(d?ykd%;#EU?^^WR&JZc=hjSaVA3 z2v2)zTQu0=U!RnM+kW!&+bQml$WVAVV%bxR_aSiZqBm2;mdr7NE0rBtHObao9OLaQ z%IDZEAPnw$Kku3<=RIg-$J-W3%?@MJ>O8F5g&@du&M$31z8Pv_h$e>FFX^nzlnaVC1|s@5#QXA<$J~Z zRp%C9ULkI>$aX%%c{tdm9^1I}K7ho1PO9uTfrEhkl=EN6Yg&YaE=UmK$~msr-x`A( z{q>B*TBLlIaYc^7-_WEcN_Z>bHn-ICbCNx;==lPFb5~rLV91hOQWwVPYstdjaEe*wBPGPqa%_>c&klQf_}_mnS) z7`5_X6d*l_vCurN}ss{toA+2kp_zQQ?T30!eeExKsK+4Bb-~8&&RkpC|>Y zQ!EWvM+V95R6l{ldUyE>~)GjQ*XI~-!AqCR0%fTLY4HrX?;qT;ia?`4$y zm#aF;_t6^@vT)JaGW|($>&%e-_AkZH7B@!=`{AlNzJhiXHI3e>;DD#(g+|CF$3rLp zZeRWTqQC_pJO$&2Q8sT)7Gta$#+arx=3nY-I< zo()Y!h`0NKVw#h18{?`g-zL9Vo+H1xBRHvJ*rz~kxb<3KXfl8zVGsw0gHDZM1y@Y0 zM6IKHv*a&;}Y9ivgp5j{#r zDGXsc(~iNWW!?|6Ot&6>UkED$BWirq8`zUsyUO{4jdbm{(#pZfXOMFm!&18Vm5(pB zFIWUcMe4|{))jqQW#A}LOOOPh_tgghtIcj5r1_2}B?{4?UC1@wQg~>FK~D7i+wW0N 
zqLBH8ER$~3-l&I{nSNMef7q&;A}_0ti1F6CO9x$9JSva#qPx4Xp{OmtQzQWW^gUjS z_mo~{!Pu_R(PKot-wr0>BYJfwxjFC*kGL@-&@JM0HiCyOSWyejqoax zzk(ayJc?+V5CfGbhPie}=4a#KaC?ISO*bkZIzu!Oc1uXA$a+f2_h2d89-KC6u2v99 zKYWgGgmcW_YwUo|^2Gb~vp zj9^j4`8uFPwHS`^Pbt+3dnWlTgW}VKrzzsNtbuc1iZn)62UHYBRe{K z1q0chL#&~P1!L7S(5t|ik7+{0eY20~_PL*S@3mWB;uqGTK@s+E+#jtAF?E;OS<#ht zmtB$nmvHrmsj{oC?0q^r7 zC(Q_qJ!?_eTpp!Vo6&vwbcLo`l>JZ70elQS$v9kOHIgso?ibr#_b3hFWGpsQmIUNJ&

    )-v~ zTeLw;>Ku{(m;CSfQyhT6=cvuuf2_t+DmIOPq9~YD_=NW_4gJyhao{EcEC)|r07^|C(FkeJJ3nknrJ2!D`rZ*WRe%Nz*oLWYN6z-vO)NJ z?Xv5=yRmysdt8@zhF{U(Z|7?$llgh!qsH8uB4XIwgE@?>8XyiMgGc6gfyXlwakvQ{ zOswa;$3AE6dKQ!7r$tQ3Ihl+TKdm+HL-|uC3a-%eTh`u&Punb5+VzpV3x2Gi&%zFi z_{}EqR8*A632`xt=_CQ0#4{X>KlSnXVIM_Npj%5|*MtqWy=#sSaSc06PNp-ry}lMe z+UA&8b#&@hj!CaNVDcr9HQtsNtbG?>m%q+9qO~6L{j08H@E@Pkl-{Qlp#~EEcRsy7OhM zJeodbl>c_3Kwq)qiYbWI+rAd3weoWCd@%*{makbsgfI_%t4Kq2qInqatJ=FO#>a$9S~Xwz}y2*wlLR z*eWIP6frVR5P6d7n-Fy6KG<$;1ww!O*(0HW6TX`1-*dXLL?W7pH9omF)mn)xK4-$fe5 z2jqe>rq%lN*Pk^gO*TE|Q+hkzv{c}|n7NeO7vt`&xYI}3oMjn0>gt?b)-!;J52%`Q z3&Mg0rAR%x14up|rPd}lJjlz%sHQAK{u<&cy=TR_c3KYh^qt3T)|(CJ;NVobj)(nV zp5b?pJiqE=E?H^VY+kTwE@~uvL2a+rZkr)s(o$I91! z=3X&;Y^mWpJXHAht_>aNd0G)q;Lu~3Wi|K`VK5w>_vU3&jd#>$xZ_XDWLEvjlaUFJ z)sE5-q70`eL-IId@MRDs@s2?cp?f4kzevMk+srPrHk70*fu-7V2li4j+k0h;lDl2sJ zlEx{qFKC|u$pOGR<>9HowjLu3lc4luD3{f5XRR6ZE}NmRcz@u6mn z^1;ZyN9(e258YEubGFB_Ur{nMC97EfnQD>E&*MQ~X>x-jikJNrwaGlWvuof)e{%D^+bH`?-*tKP-mqm3O<>pJNgW-}AoX6K zP~A|U^I>pv;K}c&gVvb z4&=jD+=I9F`hyXQvRn1BSkKWU$2U`Q`D6Wq{63WzY&VUV4S#xjTEe66B?}R0 z^0EDs2zp>}A`zciCHVJ)Ep!m5$t`-r^^;5p$7KxhU2Su%NVA%cfVexIjDh8hi_Y}3 z1H`M>`V1F}@&_Ssf@NJ1!My61Te-222+r7(F!}LDIGwg*cisXcF|{KHy_&B-HS@kn z3ehbJzxaFx-N;^BnpJLW7zO@ydwZY!Ot@pK^@8=m`sh=A`c1Yv@sfztsUu62YS2p# zP`7G9l1EpctM52apd1_GpAObi&9uTwSEHn06Koix@`nh`0v_%D`UZtcHT-@b4xNke!V`mcJ(~6i z^^!~S$+;ITJck;)y&?=aYrl3gXQZ0(CyQI*@tHxCjys^D6@UI02Sc->PJAiXz7C1J zy02U@J%PmKApSk1ef{rRBzL1NZB~lccXmpMvdJ;xdi4b2B=U7neF4m1o-py&;sJOb z?-l}vBp;OkijMV}*VFS|tM-dY9(R*zWUcMC<)qZcT4J;lwSlm{rbY)S(ckRYPiJA^ z;iQ{W;HjaGdXs;(oJmOQcgN2&FJSuS#K8|e@XfM}N%e&zMo6rs{^KlhbfcDW&|C|% zH~4OgW||?KPta4cB>j6rg!T0z1G`tY{j zX5s?_2T+wD=v0Afs zl;*Si*B}`G*F{hIo-ftbctn2i>De8rCqFVlJsGK_de&&MjDg&bcaR+&s8$EY7LW>& zp*gjrftDElcKQ^MoD;o#t7S}+r{j2MX0tB~c4;s<7wj&gTS)MpL1>Jnxfu3%2KBqi z4$q-*4?OS*;hNQ8>P3>RfhqlkK zBi@RZeAjTDMpO!Ps%ef%y?Xf`wWy zyf4ULx-?a-HtgOg59Lruyo{vc!vyK{dJ;Wv&3msj5olh4n^E$vg=m|B*~^w2Y@I_x 
zzmPp|c)gEtj>t!~NmT4MU93EK2YQ~roHV@_LAZhP!AKnu5quyi%7q>^aHj}@e2k!G z={8{EmGQGW7!Wu0M#fKrGR_Lf_dj)(#Mq1JRfu}C3o4pi@bYTk*~E-*!nFMN=Rbvf z1^0Y_n+$I~j3u-q76;eL&S7HBC8V|D2|KN!|5&V^_Xd%eX!xC<*TNfZMYB4cBSH%y5iS;ezGU?^Ly9@=^1o>JzjMUx6;hjPCJD__M7YEv>3 zqfYe6j$^ggZff+fvJ2u49(F^i<6b_y?Q|bEhCda%TpztSfU=jBummaqt0N=+zL)Cy z1HlEgvn4%-ZuZ}NhKP#}N@8wTR=zIcJ##H-X$cl=||8R85gth z-zqt)T%4B{Z>}_6ZzKlGiAGrpiAK>FL?sQ?m@y^+sy!8XpI!Ew%a@FjZ;EkhqP*)L zLx}=^kIJ>kg^J_i%3%swk;C8ZS6H(qxO^IuS8a?O@oz$$wwS`9Xl8Bw63rv=xfc6* z2XU$CgKJS7`d?SZ5-$GoEx9-$JIy{vZg;H#1u77miPc&>)91F?Tka92U{ZgkRCJ-T zO5yJfsw^|GO<4&0#7n>e_A|9?6a$APPt?BS&{71<&QZ|g;bK1l`9ZqyZqzk*S!AYc zhz#{x$HHb)z#y)WwV-_#5sfb-WS(`VdP7{Mg%8?6uc3iMJF8^_1^wE%3wOMBGqR;d zgaq$2WI&ax>Bb?y{3742aZA;3&2Q(U7e+xtGVsOXgs&$Zhn>f+vk%cYWAvrwth>baEwtO{?o&O>3_fPQ*s_jaTz) z*SIbJ`U}~~xOxXs0#>Ay2c&72IS&C_K>8N#atU=eNDf8Mbc@2qC+}x$oH$=+`{*tK zReH|g?{!38LV%d(pn|PbNxN`oCfmAQQ~d$(gjqbV=Qsw`UZoW!w^ z4vtp~{w4Qw221tUZY@M5QHI!x3`{!5REC}V1E*;oNQ#$P{WI-6S2xIb%%zTAqC+1PDT zZ7C@U(6@!^orfj`fNi$A;u!pk^DTs;X$qpcXGr@toJ7nxgGp)CT-pcW8J!yDp`XIJ zYlpGP)OjC5uS}Jz(5M$f}+ySDNd)~M1$RQ zx?-$s-}A^j_iwr#5kpkTEg(;i4$t0&;3|@WlpP7hEWxOD0CZqRl*)XZ&n7(TsZ$57aVx8fWV^_j-cL_ieR&gVn;a7R6DXnLKI1LQ z-gt$CrsoF3NkLKTIy>S`>$8x4rURw>pU)4-rKB78BGP61>Eg~LI)ZfbM`gRhjD$&3 zU0q~)WS3C_O} zn}AKbR2kT*9VaO6+evX}KR1mb;&HH>Z7nL5Vo48<%ofV6e*uYLvN_CZGb&+BlRnrt z7!BkX=Z?+0UG3#ljyerJ+%{(mRI%#`aIVi!XxKf|McO9nrL9 ztGm8ACW;X&XE?ui8`dwrT+(I{zGM8co(K9jBFABhVeN{Ik-)~&t0B>1Cg7o}5AhvM zCHDHzVypZ|nLR9v4`ebX#Jk zJDz0@_9VX&vm`fR3w4xBUGCeWFs&UQ{uW=#KW0t+Lg0R~*7W5_{#BAI%N~ zcbH1O!_E)x$fdK3nP7=(TFw(VLFo>`5tl_8_A==*b0OJW4)m?bkX}pZRf{XWlzS@;#+VjGv{ws1`D+6|!B6cYn%ocG?27$uH1d_S)Zl#KY6$ zJSScB@_JH?`Uj)0KjVKOL2a!}2oZkQ+lvcfa+Ml?8lMxp!FeZeggS z?_Yj(TNp@*9rTH)KmBQRfsg-d@c*SDHSAkxbeKP_l6{ws1GL-Gl(v$qow=4KVTt-A z?K1p%hd41L4N&z`mg%{%muL&Lv+nmKNmM6EAaR$N-{y)_Xn+Dy$gbwyhh)MCB38!5 zbeU3H>U}D?D&e90ei*3Ntcr7iY`VolnyU-FmfFA*%A8+yA zE_AN_)rMWL?T!%b?ci$@`_wj=s*yZ-Y%)%9@ylI9E#BjAG{5XeX4b5-1~V*PpM5F- 
zrNT2bjja8^_1C5%*2eP{G6XjNlS~g~(mqc@(FX6g%WSTH5RrCUE8m>yf9*`HBSk9~ z?j!(c_t#-w!R~}o(i)XNWISG3n>o%lh@?f_v?nk5C?7Lsu=(a8(m(&M6mL>$>ga=8 ze{2X8)@zS828vTdMiNFP?Squ0pDqQF$+fTE^H6)Ac3wJq+)nF!V*l<5<-0GS`#qnf z&ZWG4e>8s^^rjyq8>l(%^`GVJZk7GLKwZ|?mp^t#+^Jl7ck3!)(ivUkI2-gW<)FdN z!1A-^k8 zPhtRjbPVo6kEZky!c-i4Q>CteWfFgV32*t8p1G!R^ZSfGli>;Q+X8c)rm{$_rVPC} z`rj?z(6wJ*npn~(Z*Rmrck1CVzx7O+d)>?KEpYQF-nn7EfHGI4W*uCmJGP%$Uy%3~Wuv(NEkuC4TuaIveVlvj z)vDdm1Ik(&+-Up;45&cM>$i2}1m}9sqq$c4Wm_-8aGd#aIEy;Ayjh#6fTaBBiSzLJ z)z@G4cOj^rda16a1GgNBeTOLLuc#LY2s&{IS8ZO2Z@+RMWlI4S-~^`7xC>JUE7cNi zI^_mFh(0V*1~GV1LuA2kZ9SaY5SDBX;J}kMli6H?}TCevSm2oq8_LG>@{x1xt{FITxR9ZmXc!O4G_6W>k#T8 zxrG34e>hrB>O0YI?@Pa*x>;-P^Pr+UL_6)i7p^zC&n(N*r%FQF^_j!{{45_ zZrGLC_GGL0-^^Sw55=%u8S`T~yAQL1H|-z4h{1=S@?3&es$H#(<`gi{l2TG#+u%vdYf!aoKl;&BYNsz-{8HomdB**#0W06}E7cGJzjfyo%gi z?|%Rk;%Vea3H>`lh~GQN z5tv75{|BI0*H|!>xBxF+k_c2J;ymaf*aD+AyvF?1-z;k3QbAUC(>EY1+wX9Z{<@Ly z_zZu~m;jyMCQj@rp52}<-0ZL(BJxwX#y%@bdBBV%!h&3y?5l6VZWSze?8C}Cuyw0b z*cU0);S0Ue5T4ShA;GlrZ~K{vNV~6lMKhlfX9B(J2n9I=23+0h$Iw?wq!_UU9tWaS z8j<8cM3QUCJ>?y;{btf3`!`^-lUix6qrds~yG)AjT!rUlZBG*@*?aWhM*k>tSBJKS zYnc3o#G!RzN_}C{{7&G&RMco07+ju#Ha{wp-jxs?14hUD$X+7*tko8W9LpNnNjLer z)0Z*9xgDfR)=5=p-;>zE&-G>S?=npURu>e&-$(VSic)iDLdfLP%Tyc>#SB%+goD&R z{XgxU`9IWe-}g(&H$ur$_KJj16xm5A+t~LpDogfttTUrjghFLWV(h!Ij%6^D6e0UE zwy|Ux#xk}s%oy(Zo}Kr3E$4Or0r%r^&CfH(=X31GypQ+m`4U;NZ<>qM+g4C$7Hs?H z6nC^#1f8d6I&z#&3RD5wC*B91m_&LldK*R1jX0OmBPDvKct!)`&?jb~Gr8|o)X(9<| zJJeOI#wP21b^pLpw4!6~tdL3--75&1%Zc#(P8DL>A%Uv1+9WGj>leFOBB3gKMOs(8 zqnXqMX!)v(k-T#`hjw~8RK#TaYyjX9@A;oe#qs!Khfuj@mezx#wVkTgyS{rXVtM@5 zH&#OT_dnj2_oBR=?T+}^o<4?whhA}4=bYWtZUdkWK<@;IV%iSQ1uM*z_R#TAKg={> z|ETaMxCWa^TdQjlyu%m?8BkkpJxN4ONfc&=IGwND+WA)Dld#|K00W1FnEJ})+H2A$ zT{0_wf8UXMC%9Ikz(8suNE~`daSw=k}c@0x`3{kP17z>`nDiKQ^$k|U#E4+K{%0crOX>oat zOAc7})c{Sv^$|rD9-MVNSn~xo8si!*Pz&&VDUb=GE8`sDcMYk*os6!pT$$RKEf4zM z2}*A+z(4TPXQ419;FymaiH`UB!g5H1v+z3v9=Z9oT4H=r)Cy7?c3WDR_$-($RD{55 zc0ykiqIze>KaMUg2~^km1`hVbAX&p{>kR=HAazZvT7dgFAGm5HRBv}?vtD;X(wpWx 
zFvF+%<-T)puokrcWVl%KWl+PW5Ec6_c>F4l(>Wf-XQTsCyB%@jQHRjua@{D?MfUJ@oRP&2ws?cy-G8)vBl1 zH(G8?u>mG0R?tkx0;E|sFS&j6EJ6|)i^bclvCC7MiiI}N2Yb{a;IM1Hn)-|%cF{3- z{l^)@xJe<q7q8Y?Yurxmn@sAj)_X6tM%u zj6VSlsj{<7y>q>1ozJc9-e@kF6s}vI{N$wt1*s0{<>GIg--wQ#0`yo_>QPHbU@1xt zwYy&pw<5xCQ}-XkiNo2!ypIVrr_$#{GHtBjv5YT;oOMO@aNX?R<}Ip}hyV|9vI zT*oFA-+P695=U0!I9+-NsJXgXwY;?bz5~u;YxH{6p*pY_7`pCd(pu8Pk=%2#z7??dDHoIg?iSqA?rzI3ElO55wf0#0 z;?E@n!%wPNi}DDh&(v`blE64aazhr~P+Dp3MIc$-K01nsCRRPH)Fbk=M2|3wB!4LG zOFX{k8X+Cn((a(;(?ZkSY}3twQnpL|=x@`NEXhPer_L4_Ea$=>O$Koj>(9NEeVHe?vw6P0c}#`Gg7agl10rrY=&n5 zUNx1zuW^ghMCRxFInZHzf&tV-*mul>xIZv^?+|aZ|LL&y!{NF`ewr+RjH3If*#sT8 z0knZul=LFZ{2Zo_5}q$?YQsqr7!AGwMHK(EQQ!8e0k8f9=RN;DNrYVb#eN*zMD@E) z4@y)4Of=?Fm-KgmpSRU{7T0Q$1Cl}qsl^Wrp#ISE7M6cUa47hcZ_to;7lC(D~4_r zhH4C1e;4d^tQmg2%mg$Vs1-2i7j2WeuB0QC`@G+oCxgqNX#l!~P4$Kr?+oQ|OS|Oh zHD14FD^A)yIA++lsVygKB&44;~8}dh(iS$HE2hu6v833tK8$#)xYo{*gWh4 zRP7Rc7F8y`pO0WgI}Ms^p32yf1E|Vxvy{d!8uEmMdU<6Lys#y%K3M-75UmZe=-yr2 zPL_v(^GpkM^0{hQ2e{BQi@*{bu-bigg9iq-+NI`6>nTjBh%|KeH)6rbWp^{YeS^o) zH3HD@FW!^DJ21Q|I}QE8o^T# zXWw%#`xHe#7067Z{HzpL_S=98#XXU8n-M*|qKeFN3`I_g;4MN_G})I5ApHkO*(v7C56-EPhaXnzDvXgE zg(Gi{H)J(7+Zwk} zMz>yN>Ov7YI#Xk51_MwhoAz^=Ha<2VHUt2k4ZD7bWzenn4$v0KIwTfik(Es(Aj(EP)O>ULikJjLB?cLd6*k;|5SrXpd?w`jXdsdO{lv-CQfXzZRmPyl z^_c`hSH6FahxQaS`Yr%BLe&7aa^38|Em2f3>cU(?`OrDP8{xx2qYdw6QfDL)d@J7^ zhnV}nK-;%k@p0_;R~{8I9~Hknd{(<&PyB2dumW3R;`(uz`e-K*p&vRU7YICW-C8$4 z1?kOhQ30xdj&=)iUr_}r zijW8H<#RsL$xJ_)960$+$r@bHY!fv6LyCfZe_L=19XOk?Dvb;uHq44-PPGrwIse8K z*QWdSocFVjUic|F06kko)J`6^!q<2xt~QedLk{ymLA#=!kxikd?-OE|05mABV(|0Q zspq^K!)|;nA~y*zZZ=D|D~xP`*Vek>h;aP|rwMcF`3<(Lor(e)rFtW7Tidy*Xf@G$ zlw_MY)^jPq_~p;DdM`iDH#+h z{QYCr<$W4>YJ^e0jgFGT?zb|XtMvxf-NhO2r>pen=ND~Yr};0~?5+(ZB&-@0JWrg~ zLaR_`QfZD;r3gbty;v4Yw;!4amRxpAH~U7ok=oo}tU6t3g#tWh&pgXP-4g$6u{#he zamD_d)Et|i;xwdEuY36nIq712AaK|gbz*U2w9bp50C8_zv^z~Gpv3C;#V}hI!xo)P zX3eyTJ;Cf2hjGKH25XoK{fdc9Z&`hgvca&l!*c^wCCcgS^wKlZhm4ezwl5by_w_tm zU!=00mrLJXZ7qq#FZX67t0%&zo%I~|69IorYBW7WCq$4w#!U>#H=$**lM03od_FEE&R 
zqzu8bc;mSQ^BHlh-F5wfo}m_Lb}%qTxk0*@IX;sZIXCB+93T8O6M9vueD_E?Zo6k9 zC5v(;x4!!+eHWbw!YLuw&yg=@Wv$O`=>G7Ptn`~fcxWo!+NU;3TjzsG3iVChEEBHS z%vO9sfpbaH-bByd%#?wE80k;Ci}5t0L<(8J!1-7QrlM5+i7tPN`#_@PEUb!3!7IGD za`~Pe$9Z}Rpc=#lOEk`|2XmmFDhGS?`P+~SXI5B!fT4OGbBb!o9k3|S~ zBHOucJxn^Z*$JHL^p33$e!&iU@=`nP&Ir#e8^X>mXRv$~#Vt2?s2h{g^Zm|Z$s%Qf(pP4|t4l~XBQy16^FDVDa#5t0qKtQ%lvfBBAY->bMH z>f#G(IZG{M1tm-UP)s9}=o>MX1LR?=@DYAR4WB(Dc3dKyPuwmMdN9oKod=w}i`xzK z`ce0Z)JIESgUg#A_P*6%x0r}TE~uN|8A zmu$>2p&@(PzYA3*D@!%3P@&ThT2)1UOHd6>LE&G@S78w2D3tH#z_nr4isNKs%&*rz z_rVX{HxPWv$V$^`?iLDP%WpOt5Mq~pR*HRMcSXAbqa>@;qZ%qTz_WDyHXtspEb;XV zgtTLxQ=SBYVHy795}QGBavxO)#xb=x>fbvr-O#A>&NVk4W_^A5C>Azn)%{LUI<(GPXC!$rl_B-KvW<8b zP*Vo?P$s?;pPll2LLJyg=fMUKAV()v7ma|Q0?VW9#ziK*O@>IEs-PquNgI>%F6rfy z4^ZF5@unSNTis$|$08+WBvF=2ZkY_MoeZEkOEY?)GyK?HSx=TmnwTBbmp*x5wQ+@k z8Byyq)9#M?j$cXXG=s&JmGJq3TlFn>cON28rwO{xEu@a1OYMqLmV`?c6zn=IMPS|5 zP*m9klepAT*Sxv2%Zol>VfrPDJcilLea(SePI(llr^ca1zlHjz+;rf5RCzo(5pDYn zG*Y5qGeDIvF+NW12WK@$cOp)y4u4H`8!Di-J^{(mU{iHZn~E%NS1D&ji|lhhWqv?i z75mtc);j|LmyhD-%EkkLf=x4CZ^PX)Wnv=tU=B)uhK@n)Oc-+xta6L(q8N>1#iekO z1(;|OfvL8c@EN-P0kail7c~}?F=00|t!z`8((^JUtg{YxJ?WZ>dh<$WUwDI9lf=W% zH^<;*hz~|G>&WzVOrxl(V9FoGw6GOpHB~DAoViZIfkZVI&ArHFs!Cfy`zUIbg@^Ou z-I-s9j2_#Mi`2l##+5qilTxF5N7!9VdHN5gFbT_|Pre7922=iVJy%~SiMm)~g}C4t zJ*mRLp~*(G=-Ey$=1=>M(+c$Ek#R6zKM+yAiRK2+Z`eh@O2zsX7!Hj`k3l6${TR6e zp%N9ZIYtYq-9h*IIhu%b2=1z#;dh2HJ{Gm>WE~jC;ZRSk?T6h!z}vxh8r1(dE*3VKIv&x5Xi$@M*&P%-_S**R~pNGv>xf^OJ8h^OqbP}>-L zchl@FMTeGF>{E}dlc{dQVxR^g&Gs{Zi=-heb~T%c$hY8UcEUmX(J=HXSarcsmuGjq z>S=0&(5c<0uTVfLB&jRE;1EVB6mlFU*|3kttpCo8ng8`2ncjnGOuMH;_@g8f<~i-J z=D7ILmf21rIn2}FWKoOf=EJ*=`3S$2qs2@^f5+|vVC-(Yv zlWP=@aVp=U;Ev*(sSHR+kWSRd#3rg#x|Rn7Yol8b10AiX1q=I>Lhrj>7tIDopUion z$3|79U1MZc9N1wr&lUnLU~6`09Y@w}j1XcXUcr=)BecJ89IS%wjW@wT!yaSBMT<}H zD`%h0NneM-RxPD+87#f(^vb*a{9d3XvJv-Qzg|TpxP%?k{E`;#;ykY7pnzJRZY?R{ zQVY9}Z`B9EX?`jAmqHg!$WQ9(>?;b<*yH2R!XagUQ=~r1H-yAch$;=Erf#toBTNRPc zM?QvY=+&n0Jqq^j73cW8;qeDb1XMp+o@R!1QC+yke`u~s2r3s+bbvm6AY$>NU{!?> 
zlR&`+V>1h~td&msjwBa3{Z{=<@5Y$Dw11|lFXU<%F_jHVJOr8Qo7Nhc032Dc`A%8r z8!;=IOl#|O)RdGP!`xL#$sYzthAp5lTHyGpNN{4wF3#Nc&dk+iwk#RfFw=?ah`5Yn zt~)jkDW+?tdHU}NKedMf6KIQ8Gx5xp;*Fv7n~zCugy>ZJJKoO>rvvc4_4*YkMVw(? zZ6@l)%8HG9%q_R=Bu6;f50Asoo9KUE#SJn0G|p8g5q4M(zTDuo>a8A2XTyAPC@eQT zhnS;~2+R5Q+_=N#m@G=+Q_46p*36h}mrCrJSzB@Y)YbQa5H7lwS9qg<_tH70M3ofV zgXW{rCfJ=2i}O;+DWExsfeBZcz<_I>%v@iq!3L%RlUFfEvjM@c%zf|ga=%167D1kTu-&X4wZF?@BIVXb0PSMV5+ z7w4cf`N&lO8KpLe+*Moar+Joi5A5&snN1~#s51imETGI9z=$1H%}iZ5zTd}nGTtuz zNSyJNT6Dzcd*f(9DsZ8xi)o`cfiQ6imKX)9Z?H-el}JMc)sPev2Gr#KD79ZeIRD0N ze5O&W)AhYerDf^rdp3&qLpX%uoOnWcyc!lp>!1HP!YVpt*e+w12^s-D1Py`%RJ{*R z^YB}Ld}g884AIb3Se$G~QRpJ(N6_&}8(Y}9?<;9az4i)**vI#aw1-4_zRdXtdtSD( zXwZK&_3Ip<|8ABOiBhPs)_1Y+G^R_^w^+FHLr7{K7`T7dBTOO>gXUPT+qOIy{3@E; z_d3V>2cBx-0rFoC?{&AIyIhJHlJ2|F zw%!mNqdQmSpn6LmfzWE+U0`~gM50#JJrXuQ$%?7rP%HSK@e`t>$udhA$K6*3BqvIa!yH4 zgh~!BK_`j9i?SW!fF}&wh}9Akx}pZ*po z?5Q6+yFMc=c+~c+z4Tc{k+Q4fp^OQW!816{Gdf5_eAWG$vlDf7Bc?o<9qz*cl9Ihe zE;B*gckj7bZU+`!rG^4x-)T)+DfZC(O<&dGA$#M;k&1m%zAdw(+M31tVKU_*QiQ6) zrvwxn#Lb2uR^oN2>1Tx^%9lUIGab&aN`OYo#`|>Hk-@4b%bhGDCLN$DlWmF*@D{X7 zSvf-YnMu||KlYKB1}TGBb<&D!4sLzfpUF1#@qlTQ&C}w!!T>nv65~_%GUM47W%Mvg zsN2cx1${!mwIdtI>`6PaOlkAd|w?ot)u~BNa1EF`yn^DzE2e?s5@>uzyhwtjQ6HEa;lusB~CZB)%hxqrMir7j}WyLKd^aBc~4mYpeJN&2q>qY9L7 z9glJ1eN7w5{4*aor)uY}qaMQLp>;YXiE!Ii71IqTemv~Ltj7506(c;oq}n@Vh4yQU__I$BocP9_nP zmWth8P&SiFWcQrKMGu3(vj-n*dRJ8QZTLra3{|=3dHQjR1UUw9^($sok>1I{aPQ8ra~$7-Gkjv2zAA)_+Q!$2Z&rgyDlj*aD) z=uxZ zbS{iM!Fy*l{O~qo^d@JRs@JLs&%?OXp!L0k8CIEFoc%*bHj2$=BR@W=ri}R8eHty? 
zu&Hayj^^h)^D6MmTQCn?HU1Fh`}JssWhdy6yxb#9no zAlt%Gc>~N3o`ocD%NBRUvnCj8h{8@BN08?9Zxin>xb;QerUjL(NXBMIPo5Sy$?Mr54onIhMP8}iazgG4(7!nS z{b=M@=0kzGux4wMO+wBN(;2e*en`k$@!*8%+;uijzS?I4D*QQ3PUGDs@j9xQvCjLu zyvZTz5=Fi^(tPB_1R@ut8`@jUQxMO?IA8Wfxe`Nvefk;}etVHzO)iTpuYcPlb}uJD zmu$_)lZ0Ci10$p*T+I~mhkBnTkGPu55fYL$@O0uf7%jA1qpM{-QcBQPfQ&~D)H&p* zX4s>3GVp;$gW=9o0_4|TL+#KrWxR=@K5Yc-lV&AMrn!w->R$94JnEzOhNHc#~}}q9fW|wmwfVhVT8C(VP<1I@~p_p zxS=QYrAp#&T(qupzA=tZa9UY*7>YVY@QD_QV!e))A;YxNM+?z5by|IbKp-M_@dbzT zDjYVRQFS+n*TV2*^z!dMM)~TSlo4C4BW_q4*--8-9W=IVMR?4UnGR_vqI2=p-50c5 zd+Ko)5t!d2>P(c>o4ntG((Gh1pNcg0)}hs%Hq#z&hjy>zPaC;upUgFV+2oElxh4~2 z6davyj|}rdrXBsR2>8HaDNesfV)d$u;T*Q4147%k{A1G@U|aB%*F0{_In4`^Np2)F zSh`{Q3`@d~`_uydpbmbV6#8H@NRd=gRV6DpHi|!gAo;NLN(3RTL{KV0Rk5XA&`q_c zxtL#mD<{Z=m?%C}m-!dx!%Eo!^7KJ!LU-r{)eFmAjn&ju$GG_>zB4Rd`5uI=ECr0lm2eOdS}IdtdJ=Q z{e-IVjM9%W%j-Tj|J-Gd4KSh!^K9)+Q69^-+xN5j)!d3ANhO1+LL^V|g5}g9xIM(7 zzc4uA7SU*1kwUWT{M2E;Q$trtTRcksxXKkCfdvM$d0&!KDk)X17<>G-4lM;X@e<8U zZ513c#oV-eZ&FpT_r)}o)yvkMRtrm6&w53ET5*|+e>rR=FL^#*QR0%~lV=lf*>HoC z6E#e`aZ!0c6w01jtD<3{G_GRJ0s23%4l9`uyO=<(w^dUaKmzBKncnEO@-AA zrK8?tW1W6p&Jj9|KtV#X`m&t{w!{jnHtw0mH`MxEv%8sI@KQKvF_;tx9Cgzqdq-m%0~Yz4 zNVH!TgQ4k~mf|vcwCw8Mj3u zB{JWWZ!nk9h8EM@me7uFKNF<~gK7u+3rJht)dETI`)%}_x)!dW;F@;iv`1g8fVvyC zq`uiDX3)To@B?c*gzoT<$Wtv;OAvp&(QcT2swWCtW;1jv`b!F$i?lpN$vn2I?3Pw; zj6<~O34XSlRP*@RbsfLkDRz7|8UTBmkn+N%s0;PX`d8%ztEhAf;f2D~FGAC!Wzjw@ z$SZ4UIRS6|F*maPs`9f5AUkMXbif;a1#gH<|5nb|=*Z0cFBKzN9;<8bj#=Rqk0;z( z9(x27t^4Gog+_J&jh8Jrzt(225Ylq9OAXWI!5v+K(muv+>VxKC>J}i9fdl49Ak}JZ zXfwppxIC$@U%vVtKGhw8?Wrzeg1dN))HjcMGo;mK8KqQM{i>^LP>Cnt8lAg!A5Y$} zPUe2A=dBM<56hgsMo>itr(2pBi#2*rK%t{6D(D&(S>jS}MZO^WgbOqQM}aI66or~H z)0tz<{s>uq*3k3SRqEBK@h2D>FWlg)>|#BkZ2LhY`?9XDgJIiYu79ui&lmK8 z^uJyApGM{1Dv$r|x_`}#e<9rb|8@iW-&yxxS#JNo%(~48fS2YQ?38T(3Hs-ew$TH` IeaFcE0}9b {rules-ui}**. -On the *Create rule* window, give a name to the rule and optionally provide +. On the *Create rule* window, give a name to the rule and optionally provide tags. 
Select the {transform} health rule type: - ++ +-- [role="screenshot"] image::images/transform-rule.png["Creating a transform health rule",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. +-- -Select the {transform} or {transforms} to include. You can also use a special +. Select the {transform} or {transforms} to include. You can also use a special character (`*`) to apply the rule to all your {transforms}. {transforms-cap} created after the rule are automatically included. -The following health checks are available and enabled by default: - +. The following health checks are available and enabled by default: ++ +-- _{transform-cap} is not started_:: - Notifies if the corresponding {transforms} is not started or it does not index - any data. The notification message recommends the necessary actions to solve - the error. + Get alerts when a {transform} is not started or is not indexing any data. + The notification message recommends the necessary actions to solve the error. -_Errors in {transform} messages_:: - Notifies if {transform} messages contain errors. +_Unhealthy {transform}_:: + Get alerts when a {transform} has an unhealthy status. + The notification message contains status details and related issues. [role="screenshot"] image::images/transform-check-config.png["Selecting health check",500] // NOTE: This is screenshot is automatically generated. Do not edit it directly. +-- + +. Set the check interval, which defines how often to evaluate the rule conditions. + +. In the advanced options, you can change the number of consecutive runs that +must meet the rule conditions before an alert occurs. The default value is 1. As the last step in the rule creation process, define its actions. 
From f617694e102c6e7503814a865daab3dbfd1dd038 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 21 Mar 2024 10:33:12 -0400 Subject: [PATCH 093/214] [ESQL] copy ql disjunction rule into esql (#106499) Similar to #105711, this migrates a QL optimization that depends on specific class types into ESQL to refer to the ESQL version of those classes. This is necessary for #105217, which changes the class hierarchy of the binary comparisons. I migrated the existing rules tests, and added new tests to the logical plan optimizer tests to cover this rule. --- .../esql/optimizer/LogicalPlanOptimizer.java | 25 +-- .../xpack/esql/optimizer/OptimizerRules.java | 91 +++++++++++ .../LocalLogicalPlanOptimizerTests.java | 2 +- .../optimizer/LogicalPlanOptimizerTests.java | 43 ++++-- .../esql/optimizer/OptimizerRulesTests.java | 146 ++++++++++++++++++ .../ql/optimizer/OptimizerRulesTests.java | 69 ++++----- .../org/elasticsearch/xpack/ql/TestUtils.java | 9 ++ 7 files changed, 314 insertions(+), 71 deletions(-) create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 59f0d46bf618a..2879173a6f5ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -69,7 +69,6 @@ import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.StringUtils; -import java.time.ZoneId; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -130,7 +129,7 @@ protected static Batch operators() { new PropagateEquals(), new PropagateNullable(), new BooleanFunctionEqualsElimination(), - new CombineDisjunctionsToIn(), + new 
org.elasticsearch.xpack.esql.optimizer.OptimizerRules.CombineDisjunctionsToIn(), new SimplifyComparisonsArithmetics(EsqlDataTypes::areCompatible), // prune/elimination new PruneFilters(), @@ -1118,28 +1117,6 @@ private static Project pushDownPastProject(UnaryPlan parent) { } } - /** - * Combine disjunctions on the same field into an In expression. - * This rule looks for both simple equalities: - * 1. a == 1 OR a == 2 becomes a IN (1, 2) - * and combinations of In - * 2. a == 1 OR a IN (2) becomes a IN (1, 2) - * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) - * - * This rule does NOT check for type compatibility as that phase has been - * already be verified in the analyzer. - */ - public static class CombineDisjunctionsToIn extends OptimizerRules.CombineDisjunctionsToIn { - - protected In createIn(Expression key, List values, ZoneId zoneId) { - return new In(key.source(), key, values); - } - - protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { - return new Equals(k.source(), k, v.iterator().next(), finalZoneId); - } - } - static class ReplaceLimitAndSortAsTopN extends OptimizerRules.OptimizerRule { @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index b9018f56e60de..645924907b6f5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; @@ -30,13 
+32,26 @@ import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.ql.common.Failures; import org.elasticsearch.xpack.ql.expression.AttributeSet; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; import org.elasticsearch.xpack.ql.plan.QueryPlan; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + import static org.elasticsearch.xpack.ql.common.Failure.fail; +import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitOr; class OptimizerRules { @@ -139,4 +154,80 @@ protected AttributeSet references(PhysicalPlan plan) { return plan.references(); } } + + /** + * Combine disjunctions on the same field into an In expression. + * This rule looks for both simple equalities: + * 1. a == 1 OR a == 2 becomes a IN (1, 2) + * and combinations of In + * 2. a == 1 OR a IN (2) becomes a IN (1, 2) + * 3. a IN (1) OR a IN (2) becomes a IN (1, 2) + * + * This rule does NOT check for type compatibility as that phase has been + * already be verified in the analyzer. 
+ */ + public static class CombineDisjunctionsToIn extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule { + CombineDisjunctionsToIn() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP); + } + + protected In createIn(Expression key, List values, ZoneId zoneId) { + return new In(key.source(), key, values); + } + + protected Equals createEquals(Expression k, Set v, ZoneId finalZoneId) { + return new Equals(k.source(), k, v.iterator().next(), finalZoneId); + } + + @Override + protected Expression rule(Or or) { + Expression e = or; + // look only at equals and In + List exps = splitOr(e); + + Map> found = new LinkedHashMap<>(); + ZoneId zoneId = null; + List ors = new LinkedList<>(); + + for (Expression exp : exps) { + if (exp instanceof Equals eq) { + // consider only equals against foldables + if (eq.right().foldable()) { + found.computeIfAbsent(eq.left(), k -> new LinkedHashSet<>()).add(eq.right()); + } else { + ors.add(exp); + } + if (zoneId == null) { + zoneId = eq.zoneId(); + } + } else if (exp instanceof In in) { + found.computeIfAbsent(in.value(), k -> new LinkedHashSet<>()).addAll(in.list()); + if (zoneId == null) { + zoneId = in.zoneId(); + } + } else { + ors.add(exp); + } + } + + if (found.isEmpty() == false) { + // combine equals alongside the existing ors + final ZoneId finalZoneId = zoneId; + found.forEach( + (k, v) -> { ors.add(v.size() == 1 ? createEquals(k, v, finalZoneId) : createIn(k, new ArrayList<>(v), finalZoneId)); } + ); + + // TODO: this makes a QL `or`, not an ESQL `or` + Expression combineOr = combineOr(ors); + // check the result semantically since the result might different in order + // but be actually the same which can trigger a loop + // e.g. 
a == 1 OR a == 2 OR null --> null OR a in (1,2) --> literalsOnTheRight --> cycle + if (e.semanticEquals(combineOr) == false) { + e = combineOr; + } + } + + return e; + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 6370b0198ae88..a30418c69f0f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -54,8 +54,8 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForExistingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; -import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests.getFieldAttribute; import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizerTests.greaterThanOf; +import static org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index df806e0afaffb..c6747c9d65d24 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -132,6 +132,7 @@ import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; import static org.elasticsearch.xpack.ql.expression.Literal.NULL; @@ -316,6 +317,39 @@ public void testQlComparisonOptimizationsApply() { assertThat(con.value(), equalTo(5)); } + public void testCombineDisjunctionToInEquals() { + LogicalPlan plan = plan(""" + from test + | where emp_no == 1 or emp_no == 2 + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + + public void testCombineDisjunctionToInMixed() { + LogicalPlan plan = plan(""" + from test + | where emp_no == 1 or emp_no in (2) + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + + public void testCombineDisjunctionToInFromIn() { + LogicalPlan plan = plan(""" + from test + | where emp_no in (1) or emp_no in (2) + """); + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var condition = as(filter.condition(), In.class); + assertThat(condition.list(), equalTo(List.of(new Literal(EMPTY, 1, INTEGER), new Literal(EMPTY, 2, INTEGER)))); + } + public void testCombineProjectionWithPruning() { var plan = plan(""" from test @@ -3442,15 +3476,6 @@ private void assertNullLiteral(Expression expression) { assertNull(expression.fold()); } - // TODO: move these from org.elasticsearch.xpack.ql.optimizer.OptimizerRulesTests to 
org.elasticsearch.xpack.ql.TestUtils - public static FieldAttribute getFieldAttribute(String name) { - return getFieldAttribute(name, INTEGER); - } - - private static FieldAttribute getFieldAttribute(String name, DataType dataType) { - return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true)); - } - public static WildcardLike wildcardLike(Expression left, String exp) { return new WildcardLike(EMPTY, left, new WildcardPattern(exp)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java new file mode 100644 index 0000000000000..dd9704d57b12a --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; + +import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; +import static org.elasticsearch.xpack.ql.TestUtils.relation; +import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.hamcrest.Matchers.contains; + +public class OptimizerRulesTests extends ESTestCase { + private static final Literal ONE = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); + private static final Literal TWO = new Literal(Source.EMPTY, 2, DataTypes.INTEGER); + private static final Literal THREE = new Literal(Source.EMPTY, 3, DataTypes.INTEGER); + + private static Equals equalsOf(Expression left, Expression right) { + return new Equals(EMPTY, left, right, null); + } + + private static LessThan lessThanOf(Expression left, Expression right) { + return new LessThan(EMPTY, left, right, null); + } + + // + // CombineDisjunction in Equals + // + public void testTwoEqualsWithOr() { + FieldAttribute fa = getFieldAttribute("a"); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); + Expression e = new 
OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testTwoEqualsWithSameValue() { + FieldAttribute fa = getFieldAttribute("a"); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(Equals.class, e.getClass()); + Equals eq = (Equals) e; + assertEquals(fa, eq.left()); + assertEquals(ONE, eq.right()); + } + + public void testOneEqualsOneIn() { + FieldAttribute fa = getFieldAttribute("a"); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testOneEqualsOneInWithSameValue() { + FieldAttribute fa = getFieldAttribute("a"); + + Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO)); + } + + public void testSingleValueInToEquals() { + FieldAttribute fa = getFieldAttribute("a"); + + Equals equals = equalsOf(fa, ONE); + Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(equals, e); + } + + public void testEqualsBehindAnd() { + FieldAttribute fa = getFieldAttribute("a"); + + And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); + Filter dummy = new Filter(EMPTY, relation(), and); + LogicalPlan transformed = new OptimizerRules.CombineDisjunctionsToIn().apply(dummy); + assertSame(dummy, transformed); + assertEquals(and, ((Filter) 
transformed).condition()); + } + + public void testTwoEqualsDifferentFields() { + FieldAttribute fieldOne = getFieldAttribute("ONE"); + FieldAttribute fieldTwo = getFieldAttribute("TWO"); + + Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); + assertEquals(or, e); + } + + public void testMultipleIn() { + FieldAttribute fa = getFieldAttribute("a"); + + Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); + Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + assertEquals(In.class, e.getClass()); + In in = (In) e; + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, TWO, THREE)); + } + + public void testOrWithNonCombinableExpressions() { + FieldAttribute fa = getFieldAttribute("a"); + + Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); + Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); + Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(secondOr); + assertEquals(Or.class, e.getClass()); + Or or = (Or) e; + assertEquals(or.left(), firstOr.right()); + assertEquals(In.class, or.right().getClass()); + In in = (In) or.right(); + assertEquals(fa, in.value()); + assertThat(in.list(), contains(ONE, THREE)); + } +} diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java index 1cab7dd87195b..bc7e0b2a93bf5 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.optimizer; import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.xpack.ql.TestUtils; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -60,7 +61,6 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.StringUtils; import java.time.ZoneId; @@ -69,7 +69,6 @@ import static java.util.Arrays.asList; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.ql.TestUtils.equalsOf; import static org.elasticsearch.xpack.ql.TestUtils.fieldAttribute; @@ -157,15 +156,7 @@ private static Literal L(Object value) { } private static FieldAttribute getFieldAttribute() { - return getFieldAttribute("a"); - } - - private static FieldAttribute getFieldAttribute(String name) { - return getFieldAttribute(name, INTEGER); - } - - private static FieldAttribute getFieldAttribute(String name, DataType dataType) { - return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true)); + return TestUtils.getFieldAttribute("a"); } // @@ -548,9 +539,9 @@ public void testCombineComparisonsIntoRange() { // 1 < a AND a < 3 AND 2 < b AND b < 4 AND c < 4 -> (1 < a < 3) AND (2 < b < 4) AND c < 4 public void testCombineMultipleComparisonsIntoRange() { - FieldAttribute fa = getFieldAttribute("a"); - FieldAttribute fb = getFieldAttribute("b"); - FieldAttribute fc = getFieldAttribute("c"); + FieldAttribute fa = TestUtils.getFieldAttribute("a"); + FieldAttribute fb = TestUtils.getFieldAttribute("b"); + FieldAttribute fc = TestUtils.getFieldAttribute("c"); ZoneId zoneId = randomZone(); GreaterThan agt1 = new GreaterThan(EMPTY, fa, ONE, zoneId); @@ -1040,9 +1031,9 @@ public void 
testCombineBinaryComparisonsWithDifferentUpperLimitInclusion() { // (a = 1 AND b = 3 AND c = 4) OR (a = 2 AND b = 3 AND c = 4) -> (b = 3 AND c = 4) AND (a = 1 OR a = 2) public void testBooleanSimplificationCommonExpressionSubstraction() { - FieldAttribute fa = getFieldAttribute("a"); - FieldAttribute fb = getFieldAttribute("b"); - FieldAttribute fc = getFieldAttribute("c"); + FieldAttribute fa = TestUtils.getFieldAttribute("a"); + FieldAttribute fb = TestUtils.getFieldAttribute("b"); + FieldAttribute fc = TestUtils.getFieldAttribute("c"); Expression a1 = equalsOf(fa, ONE); Expression a2 = equalsOf(fa, TWO); @@ -1414,7 +1405,7 @@ public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { // a == 1 AND a == 2 -> nop for date/time fields public void testPropagateEquals_ignoreDateTimeFields() { - FieldAttribute fa = getFieldAttribute("a", DataTypes.DATETIME); + FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); Equals eq1 = equalsOf(fa, ONE); Equals eq2 = equalsOf(fa, TWO); And and = new And(EMPTY, eq1, eq2); @@ -1564,8 +1555,8 @@ public void testEqualsBehindAnd() throws Exception { } public void testTwoEqualsDifferentFields() throws Exception { - FieldAttribute fieldOne = getFieldAttribute("ONE"); - FieldAttribute fieldTwo = getFieldAttribute("TWO"); + FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); + FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); Expression e = new CombineDisjunctionsToIn().rule(or); @@ -1670,7 +1661,11 @@ public void testIsNullAndMultipleComparison() throws Exception { FieldAttribute fa = getFieldAttribute(); IsNull isNull = new IsNull(EMPTY, fa); - And nestedAnd = new And(EMPTY, lessThanOf(getFieldAttribute("b"), ONE), lessThanOf(getFieldAttribute("c"), ONE)); + And nestedAnd = new And( + EMPTY, + lessThanOf(TestUtils.getFieldAttribute("b"), ONE), + lessThanOf(TestUtils.getFieldAttribute("c"), ONE) + ); And 
and = new And(EMPTY, isNull, nestedAnd); And top = new And(EMPTY, and, lessThanOf(fa, ONE)); @@ -1689,7 +1684,7 @@ public void testIsNullAndDeeplyNestedExpression() throws Exception { greaterThanOf(new Div(EMPTY, new Add(EMPTY, fa, ONE), TWO), ONE), greaterThanOf(new Add(EMPTY, fa, TWO), ONE) ); - Expression kept = new And(EMPTY, isNull, lessThanOf(getFieldAttribute("b"), THREE)); + Expression kept = new And(EMPTY, isNull, lessThanOf(TestUtils.getFieldAttribute("b"), THREE)); And and = new And(EMPTY, nullified, kept); Expression optimized = new PropagateNullable().rule(and); @@ -1729,8 +1724,8 @@ public void testIsNullDisjunction() throws Exception { public void testCombineFilters() throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); Filter fa = new Filter(EMPTY, relation, conditionA); Filter fb = new Filter(EMPTY, fa, conditionB); @@ -1740,11 +1735,11 @@ public void testCombineFilters() throws Exception { public void testPushDownFilter() throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); Filter fa = new Filter(EMPTY, relation, conditionA); - List projections = singletonList(getFieldAttribute("b")); + List projections = singletonList(TestUtils.getFieldAttribute("b")); Project project = new Project(EMPTY, fa, projections); Filter fb = new Filter(EMPTY, project, conditionB); @@ -1754,12 +1749,12 @@ public void testPushDownFilter() throws Exception { public void testPushDownFilterThroughAgg() 
throws Exception { EsRelation relation = relation(); - GreaterThan conditionA = greaterThanOf(getFieldAttribute("a"), ONE); - LessThan conditionB = lessThanOf(getFieldAttribute("b"), TWO); + GreaterThan conditionA = greaterThanOf(TestUtils.getFieldAttribute("a"), ONE); + LessThan conditionB = lessThanOf(TestUtils.getFieldAttribute("b"), TWO); GreaterThanOrEqual aggregateCondition = greaterThanOrEqualOf(new Count(EMPTY, ONE, false), THREE); Filter fa = new Filter(EMPTY, relation, conditionA); - List projections = singletonList(getFieldAttribute("b")); + List projections = singletonList(TestUtils.getFieldAttribute("b")); // invalid aggregate but that's fine cause its properties are not used by this rule Aggregate aggregate = new Aggregate(EMPTY, fa, emptyList(), emptyList()); Filter fb = new Filter(EMPTY, aggregate, new And(EMPTY, aggregateCondition, conditionB)); @@ -1773,7 +1768,7 @@ public void testPushDownFilterThroughAgg() throws Exception { public void testIsNotNullOnIsNullField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); Expression inn = isNotNull(fieldA); Filter f = new Filter(EMPTY, relation, inn); @@ -1782,7 +1777,7 @@ public void testIsNotNullOnIsNullField() { public void testIsNotNullOnOperatorWithOneField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); Expression inn = isNotNull(new Add(EMPTY, fieldA, ONE)); Filter f = new Filter(EMPTY, relation, inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); @@ -1792,8 +1787,8 @@ public void testIsNotNullOnOperatorWithOneField() { public void testIsNotNullOnOperatorWithTwoFields() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); - var fieldB = getFieldAttribute("b"); + var fieldA = TestUtils.getFieldAttribute("a"); + var fieldB = TestUtils.getFieldAttribute("b"); Expression inn = 
isNotNull(new Add(EMPTY, fieldA, fieldB)); Filter f = new Filter(EMPTY, relation, inn); Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); @@ -1803,7 +1798,7 @@ public void testIsNotNullOnOperatorWithTwoFields() { public void testIsNotNullOnFunctionWithOneField() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); + var fieldA = TestUtils.getFieldAttribute("a"); var pattern = L("abc"); Expression inn = isNotNull( new And(EMPTY, new TestStartsWith(EMPTY, fieldA, pattern, false), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE)) @@ -1817,8 +1812,8 @@ public void testIsNotNullOnFunctionWithOneField() { public void testIsNotNullOnFunctionWithTwoFields() { EsRelation relation = relation(); - var fieldA = getFieldAttribute("a"); - var fieldB = getFieldAttribute("b"); + var fieldA = TestUtils.getFieldAttribute("a"); + var fieldB = TestUtils.getFieldAttribute("b"); var pattern = L("abc"); Expression inn = isNotNull(new TestStartsWith(EMPTY, fieldA, fieldB, false)); diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java index 8ff7cac54bf5c..3159c6ea41547 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java @@ -82,6 +82,7 @@ import static org.elasticsearch.test.ESTestCase.randomZone; import static org.elasticsearch.xpack.ql.TestUtils.StringContainsRegex.containsRegex; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; +import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; @@ -445,6 +446,14 @@ else if (query == null) { return arr; } + public static FieldAttribute getFieldAttribute(String name) { + 
return getFieldAttribute(name, INTEGER); + } + + public static FieldAttribute getFieldAttribute(String name, DataType dataType) { + return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true)); + } + // Matcher which extends the functionality of org.hamcrest.Matchers.matchesPattern(String)} // by allowing to match detected regex groups later on in the pattern, e.g.: // "(?.+?)"....... \k....."} From 49d40c34f7acf5d0a027c36f7961c24bc27cce03 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Thu, 21 Mar 2024 10:48:47 -0400 Subject: [PATCH 094/214] Complete placeholder AtomicRegister Coordinator tests (#106512) Porting over Coordinator tests to Stateless/AtomicRegisterCoordinatorTests --- .../cluster/coordination/Coordinator.java | 20 +++++++ .../AtomicRegisterCoordinatorTests.java | 26 +++++--- .../coordination/CoordinatorTests.java | 59 +++++++++++++++---- .../AbstractCoordinatorTestCase.java | 10 ++++ .../elasticsearch/test/MockLogAppender.java | 8 +++ 5 files changed, 105 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index d1092b0390dff..fc7eaa97c677b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -504,6 +504,13 @@ private void closePrevotingRound() { } } + /** + * Updates {@link #maxTermSeen} if greater. + * + * Every time a new term is found, either from another node requesting election, or this node trying to run for election, always update + * the max term number. The max term may not reflect an actual election, but rather an election attempt by some node in the + * cluster. 
+ */ private void updateMaxTermSeen(final long term) { synchronized (mutex) { maxTermSeen = Math.max(maxTermSeen, term); @@ -549,6 +556,13 @@ private void startElection() { } } + /** + * Broadcasts a request to all 'discoveredNodes' in the cluster to elect 'candidateMasterNode' as the new master. + * + * @param candidateMasterNode the node running for election + * @param term the new proposed master term + * @param discoveredNodes all the nodes to which to send the request + */ private void broadcastStartJoinRequest(DiscoveryNode candidateMasterNode, long term, List discoveredNodes) { electionStrategy.onNewElection(candidateMasterNode, term, new ActionListener<>() { @Override @@ -670,6 +684,9 @@ public void onFailure(Exception e) { }); } + /** + * Validates a request to join the new cluster. Runs on the candidate node running for election to master. + */ private void validateJoinRequest(JoinRequest joinRequest, ActionListener validateListener) { // Before letting the node join the cluster, ensure: @@ -753,6 +770,9 @@ private void sendJoinPing(DiscoveryNode discoveryNode, TransportRequestOptions.T ); } + /** + * Processes the request to join the cluster. Received by the node running for election to master. 
+ */ private void processJoinRequest(JoinRequest joinRequest, ActionListener joinListener) { assert Transports.assertNotTransportThread("blocking on coordinator mutex and maybe doing IO to increase term"); final Optional optionalJoin = joinRequest.getOptionalJoin(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java index 92f2f5c41d1a6..b453abd97ec84 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/AtomicRegisterCoordinatorTests.java @@ -45,6 +45,13 @@ import static org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService.HEARTBEAT_FREQUENCY; import static org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService.MAX_MISSED_HEARTBEATS; +/** + * Tests that the Coordinator code runs correctly relying on atomic register compare-and-swap. Stateless will use implementations of atomic + * register CAS in the cloud blob stores. + * + * StatelessCoordinationTests extends AtomicRegisterCoordinatorTests for testing, inheriting all the tests but using different + * {@link ElectionStrategy} implementations, etc. 
+ */ @TestLogging(reason = "these tests do a lot of log-worthy things but we usually don't care", value = "org.elasticsearch:FATAL") public class AtomicRegisterCoordinatorTests extends CoordinatorTests { @@ -86,23 +93,23 @@ public void testAckListenerReceivesNacksIfLeaderStandsDown() { } @Override - @AwaitsFix(bugUrl = "ES-5645") public void testAckListenerReceivesNacksIfPublicationTimesOut() { // The leader still has access to the register, therefore it acknowledges the state update + testAckListenerReceivesNacksIfPublicationTimesOut(true); } @Override - @AwaitsFix(bugUrl = "ES-5645") - public void testClusterCannotFormWithFailingJoinValidation() { - // A single node can form a cluster in this case + public void testClusterCannotFormWithFailingJoinValidation() throws Exception { + // A single node can form a cluster if it is able to join (vote for) its own cluster, so we must disable all nodes from successfully + // joining a cluster. + clusterCannotFormWithFailingJoinValidation(true); } @Override - @AwaitsFix(bugUrl = "ES-5645") + @AwaitsFix(bugUrl = "ES-8099") public void testCannotJoinClusterWithDifferentUUID() { - // The cluster2 leader is considered dead since we only run the nodes in cluster 1 - // therefore the node coming from cluster 2 ends up taking over the old master in cluster 2 - // TODO: add more checks to avoid forming a mixed cluster between register based and traditional clusters + // Placeholder to implement a test wherein the blob store cluster state is suddenly swapped out with a different cluster's state + // with a different UUID. The cluster nodes should recognize the UUID change and refuse to load the foreign cluster state. } @Override @@ -192,6 +199,9 @@ protected CoordinatorStrategy createCoordinatorStrategy() { return new AtomicRegisterCoordinatorStrategy(); } + /** + * Strategy used to inject custom behavior into the {@link AbstractCoordinatorTestCase} test infrastructure. 
+ */ class AtomicRegisterCoordinatorStrategy implements CoordinatorStrategy { private final AtomicLong currentTermRef = new AtomicLong(); private final AtomicReference heartBeatRef = new AtomicReference<>(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java index 2985cd33aaa64..f816b6ff6571c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinatorTests.java @@ -93,7 +93,6 @@ @TestLogging(reason = "these tests do a lot of log-worthy things but we usually don't care", value = "org.elasticsearch:FATAL") public class CoordinatorTests extends AbstractCoordinatorTestCase { - public void testCanUpdateClusterStateAfterStabilisation() { try (Cluster cluster = new Cluster(randomIntBetween(1, 5))) { cluster.runRandomly(); @@ -637,6 +636,10 @@ public void testAckListenerReceivesNoAckFromHangingFollower() { } public void testAckListenerReceivesNacksIfPublicationTimesOut() { + testAckListenerReceivesNacksIfPublicationTimesOut(false); + } + + protected void testAckListenerReceivesNacksIfPublicationTimesOut(boolean expectLeaderAcksSuccessfullyInStateless) { try (Cluster cluster = new Cluster(3)) { cluster.runRandomly(); cluster.stabilise(); @@ -651,12 +654,19 @@ public void testAckListenerReceivesNacksIfPublicationTimesOut() { assertFalse("expected no immediate ack from " + leader, ackCollector.hasAcked(leader)); assertFalse("expected no immediate ack from " + follower0, ackCollector.hasAcked(follower0)); assertFalse("expected no immediate ack from " + follower1, ackCollector.hasAcked(follower1)); + follower0.heal(); follower1.heal(); cluster.stabilise(); assertTrue("expected eventual nack from " + follower0, ackCollector.hasAckedUnsuccessfully(follower0)); assertTrue("expected eventual nack from " + follower1, 
ackCollector.hasAckedUnsuccessfully(follower1)); - assertTrue("expected eventual nack from " + leader, ackCollector.hasAckedUnsuccessfully(leader)); + if (expectLeaderAcksSuccessfullyInStateless) { + // A stateless leader directly updates the cluster state in the remote blob store: it does not require communication with + // the other cluster nodes to proceed with an update commit to the cluster state. + assertTrue("expected ack from leader, " + leader, ackCollector.hasAckedSuccessfully(leader)); + } else { + assertTrue("expected eventual nack from leader, " + leader, ackCollector.hasAckedUnsuccessfully(leader)); + } } } @@ -1271,21 +1281,50 @@ public void testNodeCannotJoinIfJoinValidationFailsOnJoiningNode() { } } - public void testClusterCannotFormWithFailingJoinValidation() { + public void testClusterCannotFormWithFailingJoinValidation() throws Exception { + clusterCannotFormWithFailingJoinValidation(false); + } + + /** + * Forms a random sized cluster and then disables join validation on either a random majority subset or all cluster nodes. Then checks + * that election fails. + * + * @param failJoinOnAllNodes this controls whether to fail join on all nodes or only a majority subset. The atomic register CAS election + * strategy will succeed in electing a master if any node can vote (even the master candidate voting for + * itself). 
+ * @throws Exception + */ + protected void clusterCannotFormWithFailingJoinValidation(boolean failJoinOnAllNodes) throws Exception { try (Cluster cluster = new Cluster(randomIntBetween(1, 5))) { - // fail join validation on a majority of nodes in the initial configuration - randomValueOtherThanMany( - nodes -> cluster.initialConfiguration.hasQuorum( - nodes.stream().map(ClusterNode::getLocalNode).map(DiscoveryNode::getId).collect(Collectors.toSet()) - ) == false, - () -> randomSubsetOf(cluster.clusterNodes) - ).forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> { + List clusterNodesToFailJoin; + if (failJoinOnAllNodes) { + // The AtomicRegister strategy succeeds if a master candidate votes for itself, so we must disable all nodes from passing + // join validation so that none of them can self-elect. + clusterNodesToFailJoin = cluster.clusterNodes; + } else { + // Fetch a random subset of cluster nodes that form a quorum (majority subset). + clusterNodesToFailJoin = randomValueOtherThanMany( + nodes -> cluster.initialConfiguration.hasQuorum( + nodes.stream().map(ClusterNode::getLocalNode).map(DiscoveryNode::getId).collect(Collectors.toSet()) + ) == false, + () -> randomSubsetOf(cluster.clusterNodes) + ); + } + + // Fail join validation on the set of nodes so that election will fail in the initial configuration. + clusterNodesToFailJoin.forEach(cn -> cn.extraJoinValidators.add((discoveryNode, clusterState) -> { throw new IllegalArgumentException("join validation failed"); })); + cluster.bootstrapIfNecessary(); + + // Run the cluster for 10 seconds to give the cluster time to elect a master. + // It's possible stabilisation takes longer, but essentially impossible that it _always_ takes longer. cluster.runFor(10000, "failing join validation"); + assertTrue(cluster.clusterNodes.stream().allMatch(cn -> cn.getLastAppliedClusterState().version() == 0)); + // Now clear the validation failures and verify that the cluster stabilizes. 
for (ClusterNode clusterNode : cluster.clusterNodes) { clusterNode.extraJoinValidators.clear(); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 7f39120e83c07..4e43cb33111a1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -240,6 +240,10 @@ protected static int defaultInt(Setting setting) { // Then a commit of the new leader's first cluster state + DEFAULT_CLUSTER_STATE_UPDATE_DELAY; + /** + * An estimate for the max time needed to stabilize a cluster. Takes into account delays for various communications involved in + * leader elections. + * */ public static final long DEFAULT_STABILISATION_TIME = // If leader just blackholed, need to wait for this to be detected (defaultMillis(LEADER_CHECK_INTERVAL_SETTING) + defaultMillis(LEADER_CHECK_TIMEOUT_SETTING)) * defaultInt( @@ -549,6 +553,9 @@ private void updateCommittedStates() { } } + /** + * Uses a default period of time in which to wait for cluster stabilisation, and then verifies that a master has been elected. + */ public void stabilise() { stabilise(DEFAULT_STABILISATION_TIME, true); } @@ -942,6 +949,9 @@ protected long transportDelayMillis(String actionName) { return 0; } + /** + * Mimics a cluster node for testing. 
+ */ public final class ClusterNode { private static final Logger logger = LogManager.getLogger(ClusterNode.class); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 5e63dd2edefea..10a3a8a78e483 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -240,10 +240,18 @@ public String toString() { } } + /** + * Adds the list of class loggers to this {@link MockLogAppender}. + * + * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + */ public Releasable capturing(Class... classes) { return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); } + /** + * Same as above except takes string class names of each logger. + */ public Releasable capturing(String... names) { return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); } From 34899069b6f583b393715c0a6abf06e9d5c50647 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Mar 2024 10:51:35 -0400 Subject: [PATCH 095/214] ESQL: Generate a few more docs (#106577) And improve the error message on csv test failures. 
--- docs/reference/esql/functions/asin.asciidoc | 31 ----------------- docs/reference/esql/functions/atan.asciidoc | 31 ----------------- docs/reference/esql/functions/atan2.asciidoc | 34 ------------------- .../esql/functions/description/abs.asciidoc | 2 +- .../esql/functions/description/acos.asciidoc | 2 +- .../esql/functions/description/asin.asciidoc | 4 +-- .../esql/functions/description/atan.asciidoc | 4 +-- .../esql/functions/description/atan2.asciidoc | 4 +-- .../description/auto_bucket.asciidoc | 2 +- .../esql/functions/description/case.asciidoc | 2 +- .../esql/functions/description/ceil.asciidoc | 2 +- .../functions/description/coalesce.asciidoc | 2 +- .../functions/description/concat.asciidoc | 2 +- .../esql/functions/description/cos.asciidoc | 2 +- .../esql/functions/description/cosh.asciidoc | 2 +- .../functions/description/date_diff.asciidoc | 2 +- .../description/date_extract.asciidoc | 2 +- .../functions/description/date_parse.asciidoc | 2 +- .../esql/functions/description/e.asciidoc | 2 +- .../functions/description/ends_with.asciidoc | 2 +- .../esql/functions/description/floor.asciidoc | 2 +- .../functions/description/greatest.asciidoc | 2 +- .../esql/functions/description/least.asciidoc | 2 +- .../esql/functions/description/left.asciidoc | 2 +- .../functions/description/length.asciidoc | 2 +- .../esql/functions/description/log.asciidoc | 2 +- .../esql/functions/description/log10.asciidoc | 2 +- .../esql/functions/description/ltrim.asciidoc | 2 +- .../functions/description/mv_avg.asciidoc | 2 +- .../functions/description/mv_concat.asciidoc | 2 +- .../functions/description/mv_count.asciidoc | 2 +- .../functions/description/mv_dedupe.asciidoc | 2 +- .../functions/description/mv_first.asciidoc | 2 +- .../functions/description/mv_last.asciidoc | 2 +- .../functions/description/mv_max.asciidoc | 2 +- .../functions/description/mv_median.asciidoc | 2 +- .../functions/description/mv_min.asciidoc | 2 +- .../functions/description/mv_slice.asciidoc | 2 +- 
.../functions/description/mv_sort.asciidoc | 2 +- .../functions/description/mv_sum.asciidoc | 2 +- .../functions/description/mv_zip.asciidoc | 2 +- .../esql/functions/description/pi.asciidoc | 2 +- .../esql/functions/description/pow.asciidoc | 2 +- .../functions/description/replace.asciidoc | 2 +- .../esql/functions/description/right.asciidoc | 2 +- .../esql/functions/description/round.asciidoc | 2 +- .../esql/functions/description/rtrim.asciidoc | 2 +- .../esql/functions/description/sin.asciidoc | 2 +- .../esql/functions/description/sinh.asciidoc | 2 +- .../esql/functions/description/split.asciidoc | 2 +- .../esql/functions/description/sqrt.asciidoc | 2 +- .../description/st_intersects.asciidoc | 2 +- .../esql/functions/description/st_x.asciidoc | 2 +- .../esql/functions/description/st_y.asciidoc | 2 +- .../description/starts_with.asciidoc | 2 +- .../functions/description/substring.asciidoc | 2 +- .../esql/functions/description/tan.asciidoc | 2 +- .../esql/functions/description/tanh.asciidoc | 2 +- .../esql/functions/description/tau.asciidoc | 2 +- .../functions/description/to_boolean.asciidoc | 2 +- .../description/to_cartesianpoint.asciidoc | 2 +- .../description/to_cartesianshape.asciidoc | 2 +- .../description/to_datetime.asciidoc | 2 +- .../functions/description/to_degrees.asciidoc | 2 +- .../functions/description/to_double.asciidoc | 2 +- .../description/to_geopoint.asciidoc | 2 +- .../description/to_geoshape.asciidoc | 2 +- .../functions/description/to_integer.asciidoc | 2 +- .../esql/functions/description/to_ip.asciidoc | 2 +- .../functions/description/to_long.asciidoc | 2 +- .../functions/description/to_lower.asciidoc | 2 +- .../functions/description/to_radians.asciidoc | 2 +- .../functions/description/to_string.asciidoc | 2 +- .../description/to_unsigned_long.asciidoc | 2 +- .../functions/description/to_upper.asciidoc | 2 +- .../functions/description/to_version.asciidoc | 2 +- .../esql/functions/description/trim.asciidoc | 2 +- 
.../esql/functions/examples/abs.asciidoc | 2 +- .../esql/functions/examples/acos.asciidoc | 2 +- .../esql/functions/examples/asin.asciidoc | 13 +++++++ .../esql/functions/examples/atan.asciidoc | 13 +++++++ .../esql/functions/examples/atan2.asciidoc | 13 +++++++ .../esql/functions/examples/coalesce.asciidoc | 2 +- .../esql/functions/examples/left.asciidoc | 2 +- .../esql/functions/layout/abs.asciidoc | 2 +- .../esql/functions/layout/acos.asciidoc | 2 +- .../esql/functions/layout/asin.asciidoc | 3 +- .../esql/functions/layout/atan.asciidoc | 3 +- .../esql/functions/layout/atan2.asciidoc | 3 +- .../functions/layout/auto_bucket.asciidoc | 2 +- .../esql/functions/layout/case.asciidoc | 2 +- .../esql/functions/layout/ceil.asciidoc | 2 +- .../esql/functions/layout/coalesce.asciidoc | 2 +- .../esql/functions/layout/concat.asciidoc | 2 +- .../esql/functions/layout/cos.asciidoc | 2 +- .../esql/functions/layout/cosh.asciidoc | 2 +- .../esql/functions/layout/date_diff.asciidoc | 2 +- .../functions/layout/date_extract.asciidoc | 2 +- .../esql/functions/layout/date_parse.asciidoc | 2 +- .../esql/functions/layout/e.asciidoc | 2 +- .../esql/functions/layout/ends_with.asciidoc | 2 +- .../esql/functions/layout/floor.asciidoc | 2 +- .../esql/functions/layout/greatest.asciidoc | 2 +- .../esql/functions/layout/least.asciidoc | 2 +- .../esql/functions/layout/left.asciidoc | 2 +- .../esql/functions/layout/length.asciidoc | 2 +- .../esql/functions/layout/log.asciidoc | 2 +- .../esql/functions/layout/log10.asciidoc | 2 +- .../esql/functions/layout/ltrim.asciidoc | 2 +- .../esql/functions/layout/mv_avg.asciidoc | 2 +- .../esql/functions/layout/mv_concat.asciidoc | 2 +- .../esql/functions/layout/mv_count.asciidoc | 2 +- .../esql/functions/layout/mv_dedupe.asciidoc | 2 +- .../esql/functions/layout/mv_first.asciidoc | 2 +- .../esql/functions/layout/mv_last.asciidoc | 2 +- .../esql/functions/layout/mv_max.asciidoc | 2 +- .../esql/functions/layout/mv_median.asciidoc | 2 +- 
.../esql/functions/layout/mv_min.asciidoc | 2 +- .../esql/functions/layout/mv_slice.asciidoc | 2 +- .../esql/functions/layout/mv_sort.asciidoc | 2 +- .../esql/functions/layout/mv_sum.asciidoc | 2 +- .../esql/functions/layout/mv_zip.asciidoc | 2 +- .../esql/functions/layout/pi.asciidoc | 2 +- .../esql/functions/layout/pow.asciidoc | 2 +- .../esql/functions/layout/replace.asciidoc | 2 +- .../esql/functions/layout/right.asciidoc | 2 +- .../esql/functions/layout/round.asciidoc | 2 +- .../esql/functions/layout/rtrim.asciidoc | 2 +- .../esql/functions/layout/sin.asciidoc | 2 +- .../esql/functions/layout/sinh.asciidoc | 2 +- .../esql/functions/layout/split.asciidoc | 2 +- .../esql/functions/layout/sqrt.asciidoc | 2 +- .../functions/layout/st_intersects.asciidoc | 2 +- .../esql/functions/layout/st_x.asciidoc | 2 +- .../esql/functions/layout/st_y.asciidoc | 2 +- .../functions/layout/starts_with.asciidoc | 2 +- .../esql/functions/layout/substring.asciidoc | 2 +- .../esql/functions/layout/tan.asciidoc | 2 +- .../esql/functions/layout/tanh.asciidoc | 2 +- .../esql/functions/layout/tau.asciidoc | 2 +- .../esql/functions/layout/to_boolean.asciidoc | 2 +- .../layout/to_cartesianpoint.asciidoc | 2 +- .../layout/to_cartesianshape.asciidoc | 2 +- .../functions/layout/to_datetime.asciidoc | 2 +- .../esql/functions/layout/to_degrees.asciidoc | 2 +- .../esql/functions/layout/to_double.asciidoc | 2 +- .../functions/layout/to_geopoint.asciidoc | 2 +- .../functions/layout/to_geoshape.asciidoc | 2 +- .../esql/functions/layout/to_integer.asciidoc | 2 +- .../esql/functions/layout/to_ip.asciidoc | 2 +- .../esql/functions/layout/to_long.asciidoc | 2 +- .../esql/functions/layout/to_lower.asciidoc | 2 +- .../esql/functions/layout/to_radians.asciidoc | 2 +- .../esql/functions/layout/to_string.asciidoc | 2 +- .../layout/to_unsigned_long.asciidoc | 2 +- .../esql/functions/layout/to_upper.asciidoc | 2 +- .../esql/functions/layout/to_version.asciidoc | 2 +- .../esql/functions/layout/trim.asciidoc | 2 
+- .../esql/functions/math-functions.asciidoc | 6 ++-- .../esql/functions/parameters/asin.asciidoc | 2 +- .../esql/functions/parameters/atan.asciidoc | 2 +- .../esql/functions/parameters/atan2.asciidoc | 4 +-- .../esql/functions/types/abs.asciidoc | 2 +- .../esql/functions/types/acos.asciidoc | 2 +- .../esql/functions/types/add.asciidoc | 2 +- .../esql/functions/types/asin.asciidoc | 2 +- .../esql/functions/types/atan.asciidoc | 2 +- .../esql/functions/types/atan2.asciidoc | 2 +- .../esql/functions/types/auto_bucket.asciidoc | 2 +- .../esql/functions/types/case.asciidoc | 2 +- .../esql/functions/types/ceil.asciidoc | 2 +- .../esql/functions/types/coalesce.asciidoc | 2 +- .../esql/functions/types/concat.asciidoc | 2 +- .../esql/functions/types/cos.asciidoc | 2 +- .../esql/functions/types/cosh.asciidoc | 2 +- .../esql/functions/types/date_diff.asciidoc | 2 +- .../functions/types/date_extract.asciidoc | 2 +- .../esql/functions/types/date_parse.asciidoc | 2 +- .../esql/functions/types/div.asciidoc | 2 +- .../reference/esql/functions/types/e.asciidoc | 2 +- .../esql/functions/types/ends_with.asciidoc | 2 +- .../esql/functions/types/equals.asciidoc | 2 +- .../esql/functions/types/floor.asciidoc | 2 +- .../functions/types/greater_than.asciidoc | 2 +- .../types/greater_than_or_equal.asciidoc | 2 +- .../esql/functions/types/greatest.asciidoc | 2 +- .../esql/functions/types/least.asciidoc | 2 +- .../esql/functions/types/left.asciidoc | 2 +- .../esql/functions/types/length.asciidoc | 2 +- .../esql/functions/types/less_than.asciidoc | 2 +- .../types/less_than_or_equal.asciidoc | 2 +- .../esql/functions/types/log.asciidoc | 2 +- .../esql/functions/types/log10.asciidoc | 2 +- .../esql/functions/types/ltrim.asciidoc | 2 +- .../esql/functions/types/mod.asciidoc | 2 +- .../esql/functions/types/mul.asciidoc | 2 +- .../esql/functions/types/mv_avg.asciidoc | 2 +- .../esql/functions/types/mv_concat.asciidoc | 2 +- .../esql/functions/types/mv_count.asciidoc | 2 +- 
.../esql/functions/types/mv_dedupe.asciidoc | 2 +- .../esql/functions/types/mv_first.asciidoc | 2 +- .../esql/functions/types/mv_last.asciidoc | 2 +- .../esql/functions/types/mv_max.asciidoc | 2 +- .../esql/functions/types/mv_median.asciidoc | 2 +- .../esql/functions/types/mv_min.asciidoc | 2 +- .../esql/functions/types/mv_slice.asciidoc | 2 +- .../esql/functions/types/mv_sort.asciidoc | 2 +- .../esql/functions/types/mv_sum.asciidoc | 2 +- .../esql/functions/types/mv_zip.asciidoc | 2 +- .../esql/functions/types/neg.asciidoc | 2 +- .../esql/functions/types/not_equals.asciidoc | 2 +- .../esql/functions/types/pi.asciidoc | 2 +- .../esql/functions/types/pow.asciidoc | 2 +- .../esql/functions/types/replace.asciidoc | 2 +- .../esql/functions/types/right.asciidoc | 2 +- .../esql/functions/types/round.asciidoc | 2 +- .../esql/functions/types/rtrim.asciidoc | 2 +- .../esql/functions/types/sin.asciidoc | 2 +- .../esql/functions/types/sinh.asciidoc | 2 +- .../esql/functions/types/split.asciidoc | 2 +- .../esql/functions/types/sqrt.asciidoc | 2 +- .../functions/types/st_intersects.asciidoc | 2 +- .../esql/functions/types/st_x.asciidoc | 2 +- .../esql/functions/types/st_y.asciidoc | 2 +- .../esql/functions/types/starts_with.asciidoc | 2 +- .../esql/functions/types/sub.asciidoc | 2 +- .../esql/functions/types/substring.asciidoc | 2 +- .../esql/functions/types/tan.asciidoc | 2 +- .../esql/functions/types/tanh.asciidoc | 2 +- .../esql/functions/types/tau.asciidoc | 2 +- .../esql/functions/types/to_boolean.asciidoc | 2 +- .../types/to_cartesianpoint.asciidoc | 2 +- .../types/to_cartesianshape.asciidoc | 2 +- .../esql/functions/types/to_datetime.asciidoc | 2 +- .../esql/functions/types/to_degrees.asciidoc | 2 +- .../esql/functions/types/to_double.asciidoc | 2 +- .../esql/functions/types/to_geopoint.asciidoc | 2 +- .../esql/functions/types/to_geoshape.asciidoc | 2 +- .../esql/functions/types/to_integer.asciidoc | 2 +- .../esql/functions/types/to_ip.asciidoc | 2 +- 
.../esql/functions/types/to_long.asciidoc | 2 +- .../esql/functions/types/to_lower.asciidoc | 2 +- .../esql/functions/types/to_radians.asciidoc | 2 +- .../esql/functions/types/to_string.asciidoc | 2 +- .../functions/types/to_unsigned_long.asciidoc | 2 +- .../esql/functions/types/to_upper.asciidoc | 2 +- .../esql/functions/types/to_version.asciidoc | 2 +- .../esql/functions/types/trim.asciidoc | 2 +- .../elasticsearch/xpack/esql/CsvAssert.java | 23 +++++++++++-- .../src/main/resources/meta.csv-spec | 10 +++--- .../expression/function/scalar/math/Asin.java | 10 ++++-- .../expression/function/scalar/math/Atan.java | 14 ++++++-- .../function/scalar/math/Atan2.java | 17 ++++++++-- .../function/AbstractFunctionTestCase.java | 27 +++++++-------- 254 files changed, 361 insertions(+), 374 deletions(-) delete mode 100644 docs/reference/esql/functions/asin.asciidoc delete mode 100644 docs/reference/esql/functions/atan.asciidoc delete mode 100644 docs/reference/esql/functions/atan2.asciidoc create mode 100644 docs/reference/esql/functions/examples/asin.asciidoc create mode 100644 docs/reference/esql/functions/examples/atan.asciidoc create mode 100644 docs/reference/esql/functions/examples/atan2.asciidoc diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc deleted file mode 100644 index 3c97a89435d73..0000000000000 --- a/docs/reference/esql/functions/asin.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-asin]] -=== `ASIN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/asin.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input -numeric expression as an angle, expressed in radians. 
- -include::types/asin.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=asin] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=asin-result] -|=== diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc deleted file mode 100644 index a662814cfe56c..0000000000000 --- a/docs/reference/esql/functions/atan.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-atan]] -=== `ATAN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/atan.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input -numeric expression as an angle, expressed in radians. - -include::types/atan.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=atan] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=atan-result] -|=== diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc deleted file mode 100644 index cc28b46339012..0000000000000 --- a/docs/reference/esql/functions/atan2.asciidoc +++ /dev/null @@ -1,34 +0,0 @@ -[discrete] -[[esql-atan2]] -=== `ATAN2` - -*Syntax* - -[.text-center] -image::esql/functions/signature/atan2.svg[Embedded,opts=inline] - -*Parameters* - -`y`:: -Numeric expression. If `null`, the function returns `null`. - -`x`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the -origin to the point (x , y) in the Cartesian plane, expressed in radians. 
- -include::types/atan2.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=atan2] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=atan2-result] -|=== diff --git a/docs/reference/esql/functions/description/abs.asciidoc b/docs/reference/esql/functions/description/abs.asciidoc index b2c765547cac3..1070c024d53a2 100644 --- a/docs/reference/esql/functions/description/abs.asciidoc +++ b/docs/reference/esql/functions/description/abs.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/acos.asciidoc b/docs/reference/esql/functions/description/acos.asciidoc index a393c369d9429..d31016b4dd565 100644 --- a/docs/reference/esql/functions/description/acos.asciidoc +++ b/docs/reference/esql/functions/description/acos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/asin.asciidoc b/docs/reference/esql/functions/description/asin.asciidoc index 7fdfde219cac2..e2cd7302243c1 100644 --- a/docs/reference/esql/functions/description/asin.asciidoc +++ b/docs/reference/esql/functions/description/asin.asciidoc @@ -1,5 +1,5 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* -Inverse sine trigonometric function. +Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. 
diff --git a/docs/reference/esql/functions/description/atan.asciidoc b/docs/reference/esql/functions/description/atan.asciidoc index f1e4c3fe90a9c..650632bcdfd2e 100644 --- a/docs/reference/esql/functions/description/atan.asciidoc +++ b/docs/reference/esql/functions/description/atan.asciidoc @@ -1,5 +1,5 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* -Inverse tangent trigonometric function. +Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. diff --git a/docs/reference/esql/functions/description/atan2.asciidoc b/docs/reference/esql/functions/description/atan2.asciidoc index 8642f404fc2cb..5d7bb4cdda63b 100644 --- a/docs/reference/esql/functions/description/atan2.asciidoc +++ b/docs/reference/esql/functions/description/atan2.asciidoc @@ -1,5 +1,5 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* -The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane. +The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. diff --git a/docs/reference/esql/functions/description/auto_bucket.asciidoc b/docs/reference/esql/functions/description/auto_bucket.asciidoc index 2be3aa8943e97..0c1d9d3ea1ffd 100644 --- a/docs/reference/esql/functions/description/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/description/auto_bucket.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc index 7deb8566a630f..5c98a7a2620d0 100644 --- a/docs/reference/esql/functions/description/case.asciidoc +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/ceil.asciidoc b/docs/reference/esql/functions/description/ceil.asciidoc index ed7ef6b581950..db4456896b47b 100644 --- a/docs/reference/esql/functions/description/ceil.asciidoc +++ b/docs/reference/esql/functions/description/ceil.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/coalesce.asciidoc b/docs/reference/esql/functions/description/coalesce.asciidoc index 3cab2bac462b7..e85b4e04eeaaa 100644 --- a/docs/reference/esql/functions/description/coalesce.asciidoc +++ b/docs/reference/esql/functions/description/coalesce.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/concat.asciidoc b/docs/reference/esql/functions/description/concat.asciidoc index 4523b8eb166cd..a8c136c6336dd 100644 --- a/docs/reference/esql/functions/description/concat.asciidoc +++ b/docs/reference/esql/functions/description/concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/cos.asciidoc b/docs/reference/esql/functions/description/cos.asciidoc index 394ff038b112f..e46d651b34c00 100644 --- a/docs/reference/esql/functions/description/cos.asciidoc +++ b/docs/reference/esql/functions/description/cos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/cosh.asciidoc b/docs/reference/esql/functions/description/cosh.asciidoc index 50036dd1a1c18..deaf780addb93 100644 --- a/docs/reference/esql/functions/description/cosh.asciidoc +++ b/docs/reference/esql/functions/description/cosh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/date_diff.asciidoc b/docs/reference/esql/functions/description/date_diff.asciidoc index 53759a8c56158..d48c7d492cb68 100644 --- a/docs/reference/esql/functions/description/date_diff.asciidoc +++ b/docs/reference/esql/functions/description/date_diff.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/date_extract.asciidoc b/docs/reference/esql/functions/description/date_extract.asciidoc index e9f13fdba89a8..e1c68d34d9266 100644 --- a/docs/reference/esql/functions/description/date_extract.asciidoc +++ b/docs/reference/esql/functions/description/date_extract.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/date_parse.asciidoc b/docs/reference/esql/functions/description/date_parse.asciidoc index 75eb2490cff93..6360f9f3e55d7 100644 --- a/docs/reference/esql/functions/description/date_parse.asciidoc +++ b/docs/reference/esql/functions/description/date_parse.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/e.asciidoc b/docs/reference/esql/functions/description/e.asciidoc index b6804212124c9..787de53c32ef6 100644 --- a/docs/reference/esql/functions/description/e.asciidoc +++ b/docs/reference/esql/functions/description/e.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/ends_with.asciidoc b/docs/reference/esql/functions/description/ends_with.asciidoc index 93b8873fbf56f..8695a0467d683 100644 --- a/docs/reference/esql/functions/description/ends_with.asciidoc +++ b/docs/reference/esql/functions/description/ends_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/floor.asciidoc b/docs/reference/esql/functions/description/floor.asciidoc index ecea344dd35cd..139b8b57dafb9 100644 --- a/docs/reference/esql/functions/description/floor.asciidoc +++ b/docs/reference/esql/functions/description/floor.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc index 86debd63f4ff9..3c7cfd3bfb14c 100644 --- a/docs/reference/esql/functions/description/greatest.asciidoc +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc index 6f403e9cb8574..2aeb1f85aa51a 100644 --- a/docs/reference/esql/functions/description/least.asciidoc +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/left.asciidoc b/docs/reference/esql/functions/description/left.asciidoc index fdb76e9ef3acd..bdd34d2d21285 100644 --- a/docs/reference/esql/functions/description/left.asciidoc +++ b/docs/reference/esql/functions/description/left.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/length.asciidoc b/docs/reference/esql/functions/description/length.asciidoc index 4c97428cdf8a0..bf976e3d6e507 100644 --- a/docs/reference/esql/functions/description/length.asciidoc +++ b/docs/reference/esql/functions/description/length.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/log.asciidoc b/docs/reference/esql/functions/description/log.asciidoc index 97a79cbdaee54..9e88e2fa90621 100644 --- a/docs/reference/esql/functions/description/log.asciidoc +++ b/docs/reference/esql/functions/description/log.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/log10.asciidoc b/docs/reference/esql/functions/description/log10.asciidoc index f9c98e1416971..fd5d9ce16ee5b 100644 --- a/docs/reference/esql/functions/description/log10.asciidoc +++ b/docs/reference/esql/functions/description/log10.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/ltrim.asciidoc b/docs/reference/esql/functions/description/ltrim.asciidoc index 163c7629bacea..95e3d316131fe 100644 --- a/docs/reference/esql/functions/description/ltrim.asciidoc +++ b/docs/reference/esql/functions/description/ltrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_avg.asciidoc b/docs/reference/esql/functions/description/mv_avg.asciidoc index d02992e80f1fd..e6ddd6d43502d 100644 --- a/docs/reference/esql/functions/description/mv_avg.asciidoc +++ b/docs/reference/esql/functions/description/mv_avg.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_concat.asciidoc b/docs/reference/esql/functions/description/mv_concat.asciidoc index d59c1ae9dea3e..8c3b24f858604 100644 --- a/docs/reference/esql/functions/description/mv_concat.asciidoc +++ b/docs/reference/esql/functions/description/mv_concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/mv_count.asciidoc b/docs/reference/esql/functions/description/mv_count.asciidoc index 107caf432de22..7f311e6938818 100644 --- a/docs/reference/esql/functions/description/mv_count.asciidoc +++ b/docs/reference/esql/functions/description/mv_count.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_dedupe.asciidoc b/docs/reference/esql/functions/description/mv_dedupe.asciidoc index 6b8aeab1ae2b0..0d8c49f1f77be 100644 --- a/docs/reference/esql/functions/description/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/description/mv_dedupe.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_first.asciidoc b/docs/reference/esql/functions/description/mv_first.asciidoc index ce65b75d2d5e9..01901f19bf1bf 100644 --- a/docs/reference/esql/functions/description/mv_first.asciidoc +++ b/docs/reference/esql/functions/description/mv_first.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_last.asciidoc b/docs/reference/esql/functions/description/mv_last.asciidoc index 77f9aa1501da5..55ad684a80cab 100644 --- a/docs/reference/esql/functions/description/mv_last.asciidoc +++ b/docs/reference/esql/functions/description/mv_last.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_max.asciidoc b/docs/reference/esql/functions/description/mv_max.asciidoc index 6564b9d554d73..b0a725d439698 100644 --- a/docs/reference/esql/functions/description/mv_max.asciidoc +++ b/docs/reference/esql/functions/description/mv_max.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_median.asciidoc b/docs/reference/esql/functions/description/mv_median.asciidoc index 800d22d7b8186..2167142d0c266 100644 --- a/docs/reference/esql/functions/description/mv_median.asciidoc +++ b/docs/reference/esql/functions/description/mv_median.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_min.asciidoc b/docs/reference/esql/functions/description/mv_min.asciidoc index 425c50d5a7eb6..502fce5ce4024 100644 --- a/docs/reference/esql/functions/description/mv_min.asciidoc +++ b/docs/reference/esql/functions/description/mv_min.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/mv_slice.asciidoc b/docs/reference/esql/functions/description/mv_slice.asciidoc index b2a59313f5f73..24d3183b6f40e 100644 --- a/docs/reference/esql/functions/description/mv_slice.asciidoc +++ b/docs/reference/esql/functions/description/mv_slice.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_sort.asciidoc b/docs/reference/esql/functions/description/mv_sort.asciidoc index aa9ba350b7619..ad319a332ab27 100644 --- a/docs/reference/esql/functions/description/mv_sort.asciidoc +++ b/docs/reference/esql/functions/description/mv_sort.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_sum.asciidoc b/docs/reference/esql/functions/description/mv_sum.asciidoc index 8496d734278f4..e38ee29b68123 100644 --- a/docs/reference/esql/functions/description/mv_sum.asciidoc +++ b/docs/reference/esql/functions/description/mv_sum.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/mv_zip.asciidoc b/docs/reference/esql/functions/description/mv_zip.asciidoc index de4b15a5e362f..898cb4e557640 100644 --- a/docs/reference/esql/functions/description/mv_zip.asciidoc +++ b/docs/reference/esql/functions/description/mv_zip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/pi.asciidoc b/docs/reference/esql/functions/description/pi.asciidoc index 47be5503ba369..c2b9b737126e1 100644 --- a/docs/reference/esql/functions/description/pi.asciidoc +++ b/docs/reference/esql/functions/description/pi.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/pow.asciidoc b/docs/reference/esql/functions/description/pow.asciidoc index 5a8f2563f1e33..fd05421eae005 100644 --- a/docs/reference/esql/functions/description/pow.asciidoc +++ b/docs/reference/esql/functions/description/pow.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/replace.asciidoc b/docs/reference/esql/functions/description/replace.asciidoc index e8df184a4c0a2..e621526925870 100644 --- a/docs/reference/esql/functions/description/replace.asciidoc +++ b/docs/reference/esql/functions/description/replace.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/right.asciidoc b/docs/reference/esql/functions/description/right.asciidoc index e14308df21547..e67003f8376d8 100644 --- a/docs/reference/esql/functions/description/right.asciidoc +++ b/docs/reference/esql/functions/description/right.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/round.asciidoc b/docs/reference/esql/functions/description/round.asciidoc index 1c3aefd487ac4..3dfec1b30565d 100644 --- a/docs/reference/esql/functions/description/round.asciidoc +++ b/docs/reference/esql/functions/description/round.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/rtrim.asciidoc b/docs/reference/esql/functions/description/rtrim.asciidoc index 3041427026375..7458f8dd44f9a 100644 --- a/docs/reference/esql/functions/description/rtrim.asciidoc +++ b/docs/reference/esql/functions/description/rtrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/sin.asciidoc b/docs/reference/esql/functions/description/sin.asciidoc index 0013fe5b17757..4a5f04732fccc 100644 --- a/docs/reference/esql/functions/description/sin.asciidoc +++ b/docs/reference/esql/functions/description/sin.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/sinh.asciidoc b/docs/reference/esql/functions/description/sinh.asciidoc index 46385f454fd74..a51b88c7d446e 100644 --- a/docs/reference/esql/functions/description/sinh.asciidoc +++ b/docs/reference/esql/functions/description/sinh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/split.asciidoc b/docs/reference/esql/functions/description/split.asciidoc index 1344d34e8f7b7..b1b32c1d4de5e 100644 --- a/docs/reference/esql/functions/description/split.asciidoc +++ b/docs/reference/esql/functions/description/split.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/sqrt.asciidoc b/docs/reference/esql/functions/description/sqrt.asciidoc index 6e034a154bb8d..0963e0041280b 100644 --- a/docs/reference/esql/functions/description/sqrt.asciidoc +++ b/docs/reference/esql/functions/description/sqrt.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index f22a559baad3a..b736ba29a6c8b 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc index 0e6ebc1a5ab63..beb077bea332c 100644 --- a/docs/reference/esql/functions/description/st_x.asciidoc +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc index a90cdf5ecd2de..19c371d2ef931 100644 --- a/docs/reference/esql/functions/description/st_y.asciidoc +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/starts_with.asciidoc b/docs/reference/esql/functions/description/starts_with.asciidoc index 4d07dfae3046d..f21cd724be6ef 100644 --- a/docs/reference/esql/functions/description/starts_with.asciidoc +++ b/docs/reference/esql/functions/description/starts_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/substring.asciidoc b/docs/reference/esql/functions/description/substring.asciidoc index 7771777b37999..edb97b219bbe0 100644 --- a/docs/reference/esql/functions/description/substring.asciidoc +++ b/docs/reference/esql/functions/description/substring.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/tan.asciidoc b/docs/reference/esql/functions/description/tan.asciidoc index 16a8093c9a824..1f6a4f96f59f1 100644 --- a/docs/reference/esql/functions/description/tan.asciidoc +++ b/docs/reference/esql/functions/description/tan.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/tanh.asciidoc b/docs/reference/esql/functions/description/tanh.asciidoc index ba47db32fb371..277783f7f70fe 100644 --- a/docs/reference/esql/functions/description/tanh.asciidoc +++ b/docs/reference/esql/functions/description/tanh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/tau.asciidoc b/docs/reference/esql/functions/description/tau.asciidoc index 5bcb250ed71d1..bb720c2fa737c 100644 --- a/docs/reference/esql/functions/description/tau.asciidoc +++ b/docs/reference/esql/functions/description/tau.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_boolean.asciidoc b/docs/reference/esql/functions/description/to_boolean.asciidoc index 49dc326c2260c..88c9d1707b6b9 100644 --- a/docs/reference/esql/functions/description/to_boolean.asciidoc +++ b/docs/reference/esql/functions/description/to_boolean.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc index 39261615083d5..26f69a3ba8b9c 100644 --- a/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/description/to_cartesianpoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/to_cartesianshape.asciidoc b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc index fa73652b3a4ae..82d4fcb944093 100644 --- a/docs/reference/esql/functions/description/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/description/to_cartesianshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_datetime.asciidoc b/docs/reference/esql/functions/description/to_datetime.asciidoc index 39347f90def7f..0cdca76c6462a 100644 --- a/docs/reference/esql/functions/description/to_datetime.asciidoc +++ b/docs/reference/esql/functions/description/to_datetime.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_degrees.asciidoc b/docs/reference/esql/functions/description/to_degrees.asciidoc index b5d0f2bf5054f..6e59a3e8d1893 100644 --- a/docs/reference/esql/functions/description/to_degrees.asciidoc +++ b/docs/reference/esql/functions/description/to_degrees.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/to_double.asciidoc b/docs/reference/esql/functions/description/to_double.asciidoc index b4a8c4a6a5a7c..b02142d80c61e 100644 --- a/docs/reference/esql/functions/description/to_double.asciidoc +++ b/docs/reference/esql/functions/description/to_double.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_geopoint.asciidoc b/docs/reference/esql/functions/description/to_geopoint.asciidoc index a035c52e29cd9..de59c1b65bb89 100644 --- a/docs/reference/esql/functions/description/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/description/to_geopoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_geoshape.asciidoc b/docs/reference/esql/functions/description/to_geoshape.asciidoc index bf6f21e908ab7..17b3959f681c2 100644 --- a/docs/reference/esql/functions/description/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/description/to_geoshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/to_integer.asciidoc b/docs/reference/esql/functions/description/to_integer.asciidoc index f31ff152c1c73..8a5bfd2a57927 100644 --- a/docs/reference/esql/functions/description/to_integer.asciidoc +++ b/docs/reference/esql/functions/description/to_integer.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_ip.asciidoc b/docs/reference/esql/functions/description/to_ip.asciidoc index 720ae28d35848..f15cb84bd2c4d 100644 --- a/docs/reference/esql/functions/description/to_ip.asciidoc +++ b/docs/reference/esql/functions/description/to_ip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_long.asciidoc b/docs/reference/esql/functions/description/to_long.asciidoc index 86e6377edb7fe..5ec67005fad35 100644 --- a/docs/reference/esql/functions/description/to_long.asciidoc +++ b/docs/reference/esql/functions/description/to_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_lower.asciidoc b/docs/reference/esql/functions/description/to_lower.asciidoc index 5d4285ffb40c2..93f42d4201bc8 100644 --- a/docs/reference/esql/functions/description/to_lower.asciidoc +++ b/docs/reference/esql/functions/description/to_lower.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_radians.asciidoc b/docs/reference/esql/functions/description/to_radians.asciidoc index b2c0a8c278f30..961a418a751e2 100644 --- a/docs/reference/esql/functions/description/to_radians.asciidoc +++ b/docs/reference/esql/functions/description/to_radians.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_string.asciidoc b/docs/reference/esql/functions/description/to_string.asciidoc index 0a0be5ef7ed65..31557adf86013 100644 --- a/docs/reference/esql/functions/description/to_string.asciidoc +++ b/docs/reference/esql/functions/description/to_string.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_unsigned_long.asciidoc b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc index cb98f90ad1ab1..f3087d5e576d1 100644 --- a/docs/reference/esql/functions/description/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/description/to_unsigned_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Description* diff --git a/docs/reference/esql/functions/description/to_upper.asciidoc b/docs/reference/esql/functions/description/to_upper.asciidoc index a1a4ac32b775e..067f645ede6f6 100644 --- a/docs/reference/esql/functions/description/to_upper.asciidoc +++ b/docs/reference/esql/functions/description/to_upper.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/to_version.asciidoc b/docs/reference/esql/functions/description/to_version.asciidoc index 6d8112800d122..78bacc4073e13 100644 --- a/docs/reference/esql/functions/description/to_version.asciidoc +++ b/docs/reference/esql/functions/description/to_version.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/description/trim.asciidoc b/docs/reference/esql/functions/description/trim.asciidoc index 49e50173f5d04..888189746bf20 100644 --- a/docs/reference/esql/functions/description/trim.asciidoc +++ b/docs/reference/esql/functions/description/trim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Description* diff --git a/docs/reference/esql/functions/examples/abs.asciidoc b/docs/reference/esql/functions/examples/abs.asciidoc index 6e2ce86e8b428..744ac944719b9 100644 --- a/docs/reference/esql/functions/examples/abs.asciidoc +++ b/docs/reference/esql/functions/examples/abs.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Examples* diff --git a/docs/reference/esql/functions/examples/acos.asciidoc b/docs/reference/esql/functions/examples/acos.asciidoc index 947262b2f3ff7..0b3fe6219ab43 100644 --- a/docs/reference/esql/functions/examples/acos.asciidoc +++ b/docs/reference/esql/functions/examples/acos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Example* diff --git a/docs/reference/esql/functions/examples/asin.asciidoc b/docs/reference/esql/functions/examples/asin.asciidoc new file mode 100644 index 0000000000000..16a717172e45d --- /dev/null +++ b/docs/reference/esql/functions/examples/asin.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=asin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=asin-result] +|=== + diff --git a/docs/reference/esql/functions/examples/atan.asciidoc b/docs/reference/esql/functions/examples/atan.asciidoc new file mode 100644 index 0000000000000..e98a8195a71b8 --- /dev/null +++ b/docs/reference/esql/functions/examples/atan.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=atan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=atan-result] +|=== + diff --git a/docs/reference/esql/functions/examples/atan2.asciidoc b/docs/reference/esql/functions/examples/atan2.asciidoc new file mode 100644 index 0000000000000..de803ac3e5859 --- /dev/null +++ b/docs/reference/esql/functions/examples/atan2.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=atan2] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=atan2-result] +|=== + diff --git a/docs/reference/esql/functions/examples/coalesce.asciidoc b/docs/reference/esql/functions/examples/coalesce.asciidoc index b4ba51168fb8c..7ec2c747837aa 100644 --- a/docs/reference/esql/functions/examples/coalesce.asciidoc +++ b/docs/reference/esql/functions/examples/coalesce.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Example* diff --git a/docs/reference/esql/functions/examples/left.asciidoc b/docs/reference/esql/functions/examples/left.asciidoc index 22ae9239c31f8..1189a5c28047d 100644 --- a/docs/reference/esql/functions/examples/left.asciidoc +++ b/docs/reference/esql/functions/examples/left.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Example* diff --git a/docs/reference/esql/functions/layout/abs.asciidoc b/docs/reference/esql/functions/layout/abs.asciidoc index 903266ae54342..cc3c1d5634c7e 100644 --- a/docs/reference/esql/functions/layout/abs.asciidoc +++ b/docs/reference/esql/functions/layout/abs.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-abs]] diff --git a/docs/reference/esql/functions/layout/acos.asciidoc b/docs/reference/esql/functions/layout/acos.asciidoc index 66e540dc766d6..8bc834460f01a 100644 --- a/docs/reference/esql/functions/layout/acos.asciidoc +++ b/docs/reference/esql/functions/layout/acos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-acos]] diff --git a/docs/reference/esql/functions/layout/asin.asciidoc b/docs/reference/esql/functions/layout/asin.asciidoc index b4c0fff8995e6..a092f3c13d72c 100644 --- a/docs/reference/esql/functions/layout/asin.asciidoc +++ b/docs/reference/esql/functions/layout/asin.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-asin]] @@ -12,3 +12,4 @@ image::esql/functions/signature/asin.svg[Embedded,opts=inline] include::../parameters/asin.asciidoc[] include::../description/asin.asciidoc[] include::../types/asin.asciidoc[] +include::../examples/asin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan.asciidoc b/docs/reference/esql/functions/layout/atan.asciidoc index 933d40c7e1d96..c92523ef39fae 100644 --- a/docs/reference/esql/functions/layout/atan.asciidoc +++ b/docs/reference/esql/functions/layout/atan.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-atan]] @@ -12,3 +12,4 @@ image::esql/functions/signature/atan.svg[Embedded,opts=inline] include::../parameters/atan.asciidoc[] include::../description/atan.asciidoc[] include::../types/atan.asciidoc[] +include::../examples/atan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/atan2.asciidoc b/docs/reference/esql/functions/layout/atan2.asciidoc index 9a2cfbfdf01d0..b23aa95aa56b8 100644 --- a/docs/reference/esql/functions/layout/atan2.asciidoc +++ b/docs/reference/esql/functions/layout/atan2.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-atan2]] @@ -12,3 +12,4 @@ image::esql/functions/signature/atan2.svg[Embedded,opts=inline] include::../parameters/atan2.asciidoc[] include::../description/atan2.asciidoc[] include::../types/atan2.asciidoc[] +include::../examples/atan2.asciidoc[] diff --git a/docs/reference/esql/functions/layout/auto_bucket.asciidoc b/docs/reference/esql/functions/layout/auto_bucket.asciidoc index 64c6fbcd7b627..82e05ab5d215c 100644 --- a/docs/reference/esql/functions/layout/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/layout/auto_bucket.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-auto_bucket]] diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc index 80ac137f50e08..192e74522b8d3 100644 --- a/docs/reference/esql/functions/layout/case.asciidoc +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-case]] diff --git a/docs/reference/esql/functions/layout/ceil.asciidoc b/docs/reference/esql/functions/layout/ceil.asciidoc index ebbf7b0adbced..480aeb759936d 100644 --- a/docs/reference/esql/functions/layout/ceil.asciidoc +++ b/docs/reference/esql/functions/layout/ceil.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-ceil]] diff --git a/docs/reference/esql/functions/layout/coalesce.asciidoc b/docs/reference/esql/functions/layout/coalesce.asciidoc index 4ef3d3a682ea7..47cae18197402 100644 --- a/docs/reference/esql/functions/layout/coalesce.asciidoc +++ b/docs/reference/esql/functions/layout/coalesce.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-coalesce]] diff --git a/docs/reference/esql/functions/layout/concat.asciidoc b/docs/reference/esql/functions/layout/concat.asciidoc index 55dd8f8f49e1f..fe3b544a8a9c3 100644 --- a/docs/reference/esql/functions/layout/concat.asciidoc +++ b/docs/reference/esql/functions/layout/concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-concat]] diff --git a/docs/reference/esql/functions/layout/cos.asciidoc b/docs/reference/esql/functions/layout/cos.asciidoc index e781acead4015..7b97f40529096 100644 --- a/docs/reference/esql/functions/layout/cos.asciidoc +++ b/docs/reference/esql/functions/layout/cos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-cos]] diff --git a/docs/reference/esql/functions/layout/cosh.asciidoc b/docs/reference/esql/functions/layout/cosh.asciidoc index 27146ed18d629..e36a96e0eb324 100644 --- a/docs/reference/esql/functions/layout/cosh.asciidoc +++ b/docs/reference/esql/functions/layout/cosh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. [discrete] [[esql-cosh]] diff --git a/docs/reference/esql/functions/layout/date_diff.asciidoc b/docs/reference/esql/functions/layout/date_diff.asciidoc index 928fffd329960..37ef5ea874853 100644 --- a/docs/reference/esql/functions/layout/date_diff.asciidoc +++ b/docs/reference/esql/functions/layout/date_diff.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-date_diff]] diff --git a/docs/reference/esql/functions/layout/date_extract.asciidoc b/docs/reference/esql/functions/layout/date_extract.asciidoc index 419179af93621..90c5fa68c9e1d 100644 --- a/docs/reference/esql/functions/layout/date_extract.asciidoc +++ b/docs/reference/esql/functions/layout/date_extract.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-date_extract]] diff --git a/docs/reference/esql/functions/layout/date_parse.asciidoc b/docs/reference/esql/functions/layout/date_parse.asciidoc index 2bb82cd47c59e..172208196a329 100644 --- a/docs/reference/esql/functions/layout/date_parse.asciidoc +++ b/docs/reference/esql/functions/layout/date_parse.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-date_parse]] diff --git a/docs/reference/esql/functions/layout/e.asciidoc b/docs/reference/esql/functions/layout/e.asciidoc index 89b1ad06a5f11..a0e1ca3830e30 100644 --- a/docs/reference/esql/functions/layout/e.asciidoc +++ b/docs/reference/esql/functions/layout/e.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-e]] diff --git a/docs/reference/esql/functions/layout/ends_with.asciidoc b/docs/reference/esql/functions/layout/ends_with.asciidoc index 85828298acef6..b2ff1268a951d 100644 --- a/docs/reference/esql/functions/layout/ends_with.asciidoc +++ b/docs/reference/esql/functions/layout/ends_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-ends_with]] diff --git a/docs/reference/esql/functions/layout/floor.asciidoc b/docs/reference/esql/functions/layout/floor.asciidoc index e51fa1ac1524c..f3f05260989dc 100644 --- a/docs/reference/esql/functions/layout/floor.asciidoc +++ b/docs/reference/esql/functions/layout/floor.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-floor]] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc index a2a54963354da..1ff17f3c3adfe 100644 --- a/docs/reference/esql/functions/layout/greatest.asciidoc +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-greatest]] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc index 9a220289f3d44..a14a166c8bfe4 100644 --- a/docs/reference/esql/functions/layout/least.asciidoc +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-least]] diff --git a/docs/reference/esql/functions/layout/left.asciidoc b/docs/reference/esql/functions/layout/left.asciidoc index 4e825abf3e50e..3a995a2f9a247 100644 --- a/docs/reference/esql/functions/layout/left.asciidoc +++ b/docs/reference/esql/functions/layout/left.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-left]] diff --git a/docs/reference/esql/functions/layout/length.asciidoc b/docs/reference/esql/functions/layout/length.asciidoc index b11b047caf2ed..03d81bb2f931f 100644 --- a/docs/reference/esql/functions/layout/length.asciidoc +++ b/docs/reference/esql/functions/layout/length.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-length]] diff --git a/docs/reference/esql/functions/layout/log.asciidoc b/docs/reference/esql/functions/layout/log.asciidoc index 4f0bb5b6527a3..d5ce98c524421 100644 --- a/docs/reference/esql/functions/layout/log.asciidoc +++ b/docs/reference/esql/functions/layout/log.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-log]] diff --git a/docs/reference/esql/functions/layout/log10.asciidoc b/docs/reference/esql/functions/layout/log10.asciidoc index 6732377e81fdd..3de3008e5b91a 100644 --- a/docs/reference/esql/functions/layout/log10.asciidoc +++ b/docs/reference/esql/functions/layout/log10.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-log10]] diff --git a/docs/reference/esql/functions/layout/ltrim.asciidoc b/docs/reference/esql/functions/layout/ltrim.asciidoc index a6c7bbe2330cb..54025dee54b7a 100644 --- a/docs/reference/esql/functions/layout/ltrim.asciidoc +++ b/docs/reference/esql/functions/layout/ltrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-ltrim]] diff --git a/docs/reference/esql/functions/layout/mv_avg.asciidoc b/docs/reference/esql/functions/layout/mv_avg.asciidoc index 0f0e49298c414..dc1913e53c26a 100644 --- a/docs/reference/esql/functions/layout/mv_avg.asciidoc +++ b/docs/reference/esql/functions/layout/mv_avg.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-mv_avg]] diff --git a/docs/reference/esql/functions/layout/mv_concat.asciidoc b/docs/reference/esql/functions/layout/mv_concat.asciidoc index e5cfc2b1ba74c..d5d3b98e59f59 100644 --- a/docs/reference/esql/functions/layout/mv_concat.asciidoc +++ b/docs/reference/esql/functions/layout/mv_concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_concat]] diff --git a/docs/reference/esql/functions/layout/mv_count.asciidoc b/docs/reference/esql/functions/layout/mv_count.asciidoc index a575452aa24df..a8a0286c114d0 100644 --- a/docs/reference/esql/functions/layout/mv_count.asciidoc +++ b/docs/reference/esql/functions/layout/mv_count.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_count]] diff --git a/docs/reference/esql/functions/layout/mv_dedupe.asciidoc b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc index ed45c0d511e53..332cdfc32ace5 100644 --- a/docs/reference/esql/functions/layout/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/layout/mv_dedupe.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_dedupe]] diff --git a/docs/reference/esql/functions/layout/mv_first.asciidoc b/docs/reference/esql/functions/layout/mv_first.asciidoc index 6f6ed924c5496..270861cf99e5f 100644 --- a/docs/reference/esql/functions/layout/mv_first.asciidoc +++ b/docs/reference/esql/functions/layout/mv_first.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_first]] diff --git a/docs/reference/esql/functions/layout/mv_last.asciidoc b/docs/reference/esql/functions/layout/mv_last.asciidoc index 6e65a3ebb17b4..f1c183d0723ad 100644 --- a/docs/reference/esql/functions/layout/mv_last.asciidoc +++ b/docs/reference/esql/functions/layout/mv_last.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_last]] diff --git a/docs/reference/esql/functions/layout/mv_max.asciidoc b/docs/reference/esql/functions/layout/mv_max.asciidoc index c687d68fda8f1..7c5155b97b7ac 100644 --- a/docs/reference/esql/functions/layout/mv_max.asciidoc +++ b/docs/reference/esql/functions/layout/mv_max.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_max]] diff --git a/docs/reference/esql/functions/layout/mv_median.asciidoc b/docs/reference/esql/functions/layout/mv_median.asciidoc index ad131ccbb6e53..70c84319bdbfc 100644 --- a/docs/reference/esql/functions/layout/mv_median.asciidoc +++ b/docs/reference/esql/functions/layout/mv_median.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-mv_median]] diff --git a/docs/reference/esql/functions/layout/mv_min.asciidoc b/docs/reference/esql/functions/layout/mv_min.asciidoc index 52a1a1ec6091d..78b74318d0dc1 100644 --- a/docs/reference/esql/functions/layout/mv_min.asciidoc +++ b/docs/reference/esql/functions/layout/mv_min.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_min]] diff --git a/docs/reference/esql/functions/layout/mv_slice.asciidoc b/docs/reference/esql/functions/layout/mv_slice.asciidoc index a9dff4c77dd54..87c5d26e7747b 100644 --- a/docs/reference/esql/functions/layout/mv_slice.asciidoc +++ b/docs/reference/esql/functions/layout/mv_slice.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_slice]] diff --git a/docs/reference/esql/functions/layout/mv_sort.asciidoc b/docs/reference/esql/functions/layout/mv_sort.asciidoc index d7822e6356106..1207b915b33c0 100644 --- a/docs/reference/esql/functions/layout/mv_sort.asciidoc +++ b/docs/reference/esql/functions/layout/mv_sort.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_sort]] diff --git a/docs/reference/esql/functions/layout/mv_sum.asciidoc b/docs/reference/esql/functions/layout/mv_sum.asciidoc index df0830c83a2eb..963a936ee4111 100644 --- a/docs/reference/esql/functions/layout/mv_sum.asciidoc +++ b/docs/reference/esql/functions/layout/mv_sum.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_sum]] diff --git a/docs/reference/esql/functions/layout/mv_zip.asciidoc b/docs/reference/esql/functions/layout/mv_zip.asciidoc index 8ce6c94dc6585..29d9273423264 100644 --- a/docs/reference/esql/functions/layout/mv_zip.asciidoc +++ b/docs/reference/esql/functions/layout/mv_zip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-mv_zip]] diff --git a/docs/reference/esql/functions/layout/pi.asciidoc b/docs/reference/esql/functions/layout/pi.asciidoc index fc6e549af9f4b..402e7b28481d6 100644 --- a/docs/reference/esql/functions/layout/pi.asciidoc +++ b/docs/reference/esql/functions/layout/pi.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-pi]] diff --git a/docs/reference/esql/functions/layout/pow.asciidoc b/docs/reference/esql/functions/layout/pow.asciidoc index c91944acc66bc..019c17b7a03c6 100644 --- a/docs/reference/esql/functions/layout/pow.asciidoc +++ b/docs/reference/esql/functions/layout/pow.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-pow]] diff --git a/docs/reference/esql/functions/layout/replace.asciidoc b/docs/reference/esql/functions/layout/replace.asciidoc index bd060aa4e6d0d..9f51c544e2c6a 100644 --- a/docs/reference/esql/functions/layout/replace.asciidoc +++ b/docs/reference/esql/functions/layout/replace.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-replace]] diff --git a/docs/reference/esql/functions/layout/right.asciidoc b/docs/reference/esql/functions/layout/right.asciidoc index d8af40a3e9b19..86e059f5ad4f2 100644 --- a/docs/reference/esql/functions/layout/right.asciidoc +++ b/docs/reference/esql/functions/layout/right.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-right]] diff --git a/docs/reference/esql/functions/layout/round.asciidoc b/docs/reference/esql/functions/layout/round.asciidoc index 815ba1f9a7fe7..8424432052750 100644 --- a/docs/reference/esql/functions/layout/round.asciidoc +++ b/docs/reference/esql/functions/layout/round.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-round]] diff --git a/docs/reference/esql/functions/layout/rtrim.asciidoc b/docs/reference/esql/functions/layout/rtrim.asciidoc index e15b41479d2e2..984b1432ccc9b 100644 --- a/docs/reference/esql/functions/layout/rtrim.asciidoc +++ b/docs/reference/esql/functions/layout/rtrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. [discrete] [[esql-rtrim]] diff --git a/docs/reference/esql/functions/layout/sin.asciidoc b/docs/reference/esql/functions/layout/sin.asciidoc index 7b45fcf72c38e..802045d0a23af 100644 --- a/docs/reference/esql/functions/layout/sin.asciidoc +++ b/docs/reference/esql/functions/layout/sin.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-sin]] diff --git a/docs/reference/esql/functions/layout/sinh.asciidoc b/docs/reference/esql/functions/layout/sinh.asciidoc index 9ce60af6ed968..3ac1f03a608f6 100644 --- a/docs/reference/esql/functions/layout/sinh.asciidoc +++ b/docs/reference/esql/functions/layout/sinh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-sinh]] diff --git a/docs/reference/esql/functions/layout/split.asciidoc b/docs/reference/esql/functions/layout/split.asciidoc index cff612ec5351e..fc1e8540c4eea 100644 --- a/docs/reference/esql/functions/layout/split.asciidoc +++ b/docs/reference/esql/functions/layout/split.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-split]] diff --git a/docs/reference/esql/functions/layout/sqrt.asciidoc b/docs/reference/esql/functions/layout/sqrt.asciidoc index a14186425841d..2690bfd3f8cfc 100644 --- a/docs/reference/esql/functions/layout/sqrt.asciidoc +++ b/docs/reference/esql/functions/layout/sqrt.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-sqrt]] diff --git a/docs/reference/esql/functions/layout/st_intersects.asciidoc b/docs/reference/esql/functions/layout/st_intersects.asciidoc index e62171330abfc..1d0721b65606e 100644 --- a/docs/reference/esql/functions/layout/st_intersects.asciidoc +++ b/docs/reference/esql/functions/layout/st_intersects.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-st_intersects]] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc index 6ed1ae2b83e91..ce3824aa157b1 100644 --- a/docs/reference/esql/functions/layout/st_x.asciidoc +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-st_x]] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc index 7b4e585611294..702e9097ae689 100644 --- a/docs/reference/esql/functions/layout/st_y.asciidoc +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-st_y]] diff --git a/docs/reference/esql/functions/layout/starts_with.asciidoc b/docs/reference/esql/functions/layout/starts_with.asciidoc index 80fccbb1fc0b1..363b5e3fe33ee 100644 --- a/docs/reference/esql/functions/layout/starts_with.asciidoc +++ b/docs/reference/esql/functions/layout/starts_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-starts_with]] diff --git a/docs/reference/esql/functions/layout/substring.asciidoc b/docs/reference/esql/functions/layout/substring.asciidoc index 343344a1418dd..6bc48c89ed003 100644 --- a/docs/reference/esql/functions/layout/substring.asciidoc +++ b/docs/reference/esql/functions/layout/substring.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-substring]] diff --git a/docs/reference/esql/functions/layout/tan.asciidoc b/docs/reference/esql/functions/layout/tan.asciidoc index d304b2bd10e86..056145f5eed44 100644 --- a/docs/reference/esql/functions/layout/tan.asciidoc +++ b/docs/reference/esql/functions/layout/tan.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-tan]] diff --git a/docs/reference/esql/functions/layout/tanh.asciidoc b/docs/reference/esql/functions/layout/tanh.asciidoc index ab9213e024737..3024ac5fb2aff 100644 --- a/docs/reference/esql/functions/layout/tanh.asciidoc +++ b/docs/reference/esql/functions/layout/tanh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-tanh]] diff --git a/docs/reference/esql/functions/layout/tau.asciidoc b/docs/reference/esql/functions/layout/tau.asciidoc index 542c5db19e4e4..3b402f3e7a59f 100644 --- a/docs/reference/esql/functions/layout/tau.asciidoc +++ b/docs/reference/esql/functions/layout/tau.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-tau]] diff --git a/docs/reference/esql/functions/layout/to_boolean.asciidoc b/docs/reference/esql/functions/layout/to_boolean.asciidoc index cc0dafd0fef23..670b9868be3d7 100644 --- a/docs/reference/esql/functions/layout/to_boolean.asciidoc +++ b/docs/reference/esql/functions/layout/to_boolean.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_boolean]] diff --git a/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc index 22494dd785265..6489414c6612f 100644 --- a/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/layout/to_cartesianpoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-to_cartesianpoint]] diff --git a/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc index 04f5a821c4917..dbe5285a1d74a 100644 --- a/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/layout/to_cartesianshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_cartesianshape]] diff --git a/docs/reference/esql/functions/layout/to_datetime.asciidoc b/docs/reference/esql/functions/layout/to_datetime.asciidoc index 7815f4b918631..0ac5f7d5db4f9 100644 --- a/docs/reference/esql/functions/layout/to_datetime.asciidoc +++ b/docs/reference/esql/functions/layout/to_datetime.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_datetime]] diff --git a/docs/reference/esql/functions/layout/to_degrees.asciidoc b/docs/reference/esql/functions/layout/to_degrees.asciidoc index 745ed68cda843..2ca3524121f3f 100644 --- a/docs/reference/esql/functions/layout/to_degrees.asciidoc +++ b/docs/reference/esql/functions/layout/to_degrees.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-to_degrees]] diff --git a/docs/reference/esql/functions/layout/to_double.asciidoc b/docs/reference/esql/functions/layout/to_double.asciidoc index fd8cd7a3f51b2..e9ccbe66762c4 100644 --- a/docs/reference/esql/functions/layout/to_double.asciidoc +++ b/docs/reference/esql/functions/layout/to_double.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_double]] diff --git a/docs/reference/esql/functions/layout/to_geopoint.asciidoc b/docs/reference/esql/functions/layout/to_geopoint.asciidoc index 7dabdf7975617..a1ede90e65043 100644 --- a/docs/reference/esql/functions/layout/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/layout/to_geopoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_geopoint]] diff --git a/docs/reference/esql/functions/layout/to_geoshape.asciidoc b/docs/reference/esql/functions/layout/to_geoshape.asciidoc index 550d5fb47d846..942dd1b7f052a 100644 --- a/docs/reference/esql/functions/layout/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/layout/to_geoshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-to_geoshape]] diff --git a/docs/reference/esql/functions/layout/to_integer.asciidoc b/docs/reference/esql/functions/layout/to_integer.asciidoc index 32ddd388693fc..c9a57b819ef44 100644 --- a/docs/reference/esql/functions/layout/to_integer.asciidoc +++ b/docs/reference/esql/functions/layout/to_integer.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_integer]] diff --git a/docs/reference/esql/functions/layout/to_ip.asciidoc b/docs/reference/esql/functions/layout/to_ip.asciidoc index 3e6b54d2aa87a..97e54b879693d 100644 --- a/docs/reference/esql/functions/layout/to_ip.asciidoc +++ b/docs/reference/esql/functions/layout/to_ip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_ip]] diff --git a/docs/reference/esql/functions/layout/to_long.asciidoc b/docs/reference/esql/functions/layout/to_long.asciidoc index 4d618e675d74b..00aafb0c8710b 100644 --- a/docs/reference/esql/functions/layout/to_long.asciidoc +++ b/docs/reference/esql/functions/layout/to_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_long]] diff --git a/docs/reference/esql/functions/layout/to_lower.asciidoc b/docs/reference/esql/functions/layout/to_lower.asciidoc index fc6202591cae5..e1fbfb0079547 100644 --- a/docs/reference/esql/functions/layout/to_lower.asciidoc +++ b/docs/reference/esql/functions/layout/to_lower.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_lower]] diff --git a/docs/reference/esql/functions/layout/to_radians.asciidoc b/docs/reference/esql/functions/layout/to_radians.asciidoc index e2f45dbe166ff..e8f19a0bb1f04 100644 --- a/docs/reference/esql/functions/layout/to_radians.asciidoc +++ b/docs/reference/esql/functions/layout/to_radians.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_radians]] diff --git a/docs/reference/esql/functions/layout/to_string.asciidoc b/docs/reference/esql/functions/layout/to_string.asciidoc index f9d8a12e9a5cb..02bf6d75b2dbb 100644 --- a/docs/reference/esql/functions/layout/to_string.asciidoc +++ b/docs/reference/esql/functions/layout/to_string.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_string]] diff --git a/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc index 093c98486ac02..a1d7a2934bf8b 100644 --- a/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/layout/to_unsigned_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-to_unsigned_long]] diff --git a/docs/reference/esql/functions/layout/to_upper.asciidoc b/docs/reference/esql/functions/layout/to_upper.asciidoc index f23b8a85bce37..a589ab7fe54ed 100644 --- a/docs/reference/esql/functions/layout/to_upper.asciidoc +++ b/docs/reference/esql/functions/layout/to_upper.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_upper]] diff --git a/docs/reference/esql/functions/layout/to_version.asciidoc b/docs/reference/esql/functions/layout/to_version.asciidoc index 919749eb7b0e2..71dbdbf45980a 100644 --- a/docs/reference/esql/functions/layout/to_version.asciidoc +++ b/docs/reference/esql/functions/layout/to_version.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. [discrete] [[esql-to_version]] diff --git a/docs/reference/esql/functions/layout/trim.asciidoc b/docs/reference/esql/functions/layout/trim.asciidoc index b93bdf013fd32..3cae051176a4e 100644 --- a/docs/reference/esql/functions/layout/trim.asciidoc +++ b/docs/reference/esql/functions/layout/trim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
[discrete] [[esql-trim]] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 5891a4f9a8af7..4e1aa3c6cc757 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -33,9 +33,9 @@ include::layout/abs.asciidoc[] include::layout/acos.asciidoc[] -include::asin.asciidoc[] -include::atan.asciidoc[] -include::atan2.asciidoc[] +include::layout/asin.asciidoc[] +include::layout/atan.asciidoc[] +include::layout/atan2.asciidoc[] include::ceil.asciidoc[] include::cos.asciidoc[] include::cosh.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc index df4830d9e1cc3..2d06f7e70333d 100644 --- a/docs/reference/esql/functions/parameters/asin.asciidoc +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -1,4 +1,4 @@ *Parameters* `number`:: -Number between -1 and 1 +Number between -1 and 1. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc index 087f73688a093..8527c7f74bb09 100644 --- a/docs/reference/esql/functions/parameters/atan.asciidoc +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -1,4 +1,4 @@ *Parameters* `number`:: -A number +Numeric expression. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc index ab5268fdab612..8dc744ad03e6a 100644 --- a/docs/reference/esql/functions/parameters/atan2.asciidoc +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -1,7 +1,7 @@ *Parameters* `y_coordinate`:: -y coordinate +y coordinate. If `null`, the function returns `null`. `x_coordinate`:: -x coordinate +x coordinate. If `null`, the function returns `null`. 
diff --git a/docs/reference/esql/functions/types/abs.asciidoc b/docs/reference/esql/functions/types/abs.asciidoc index 8e9bd02d381e9..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/abs.asciidoc +++ b/docs/reference/esql/functions/types/abs.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/acos.asciidoc b/docs/reference/esql/functions/types/acos.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/acos.asciidoc +++ b/docs/reference/esql/functions/types/acos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/add.asciidoc b/docs/reference/esql/functions/types/add.asciidoc index ff9cb0cdd467b..a0215a803d4e3 100644 --- a/docs/reference/esql/functions/types/add.asciidoc +++ b/docs/reference/esql/functions/types/add.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/asin.asciidoc b/docs/reference/esql/functions/types/asin.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/asin.asciidoc +++ b/docs/reference/esql/functions/types/asin.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/atan.asciidoc b/docs/reference/esql/functions/types/atan.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/atan.asciidoc +++ b/docs/reference/esql/functions/types/atan.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/atan2.asciidoc b/docs/reference/esql/functions/types/atan2.asciidoc index de4fcf9ef3258..d1fffd88a7c3f 100644 --- a/docs/reference/esql/functions/types/atan2.asciidoc +++ b/docs/reference/esql/functions/types/atan2.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index 9fd29f7d4c718..535e2df29c353 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index 74afa40892d39..44acf331a43dc 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/ceil.asciidoc b/docs/reference/esql/functions/types/ceil.asciidoc index 8e9bd02d381e9..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/ceil.asciidoc +++ b/docs/reference/esql/functions/types/ceil.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index a19b3aa073d32..97ac47c2bb505 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/concat.asciidoc b/docs/reference/esql/functions/types/concat.asciidoc index 238351b49e2c8..16fbd6918c40f 100644 --- a/docs/reference/esql/functions/types/concat.asciidoc +++ b/docs/reference/esql/functions/types/concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/date_diff.asciidoc b/docs/reference/esql/functions/types/date_diff.asciidoc index 68a884fe895b1..98adcef51e75c 100644 --- a/docs/reference/esql/functions/types/date_diff.asciidoc +++ b/docs/reference/esql/functions/types/date_diff.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 401889f38b9e9..08bc0f6b51357 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index 6402513dcbe8d..0d9e4b30c7c7b 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/div.asciidoc b/docs/reference/esql/functions/types/div.asciidoc index c7bab205dc96f..79749dda1bc55 100644 --- a/docs/reference/esql/functions/types/div.asciidoc +++ b/docs/reference/esql/functions/types/div.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/e.asciidoc b/docs/reference/esql/functions/types/e.asciidoc index 38679594f7733..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/e.asciidoc +++ b/docs/reference/esql/functions/types/e.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/ends_with.asciidoc b/docs/reference/esql/functions/types/ends_with.asciidoc index ce79dbeeb7afe..a0236634bbf01 100644 --- a/docs/reference/esql/functions/types/ends_with.asciidoc +++ b/docs/reference/esql/functions/types/ends_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/floor.asciidoc b/docs/reference/esql/functions/types/floor.asciidoc index 8e9bd02d381e9..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/floor.asciidoc +++ b/docs/reference/esql/functions/types/floor.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 03df3780236f2..2a14b6280aa0a 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 03df3780236f2..2a14b6280aa0a 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/left.asciidoc b/docs/reference/esql/functions/types/left.asciidoc index 728b5d188b32c..157f61b59316d 100644 --- a/docs/reference/esql/functions/types/left.asciidoc +++ b/docs/reference/esql/functions/types/left.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/length.asciidoc b/docs/reference/esql/functions/types/length.asciidoc index 4c24499f7ab52..db5a48c7c4390 100644 --- a/docs/reference/esql/functions/types/length.asciidoc +++ b/docs/reference/esql/functions/types/length.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/log.asciidoc b/docs/reference/esql/functions/types/log.asciidoc index d5ec4e9e0e2c8..0a59e51e45c72 100644 --- a/docs/reference/esql/functions/types/log.asciidoc +++ b/docs/reference/esql/functions/types/log.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/log10.asciidoc b/docs/reference/esql/functions/types/log10.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/log10.asciidoc +++ b/docs/reference/esql/functions/types/log10.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/ltrim.asciidoc b/docs/reference/esql/functions/types/ltrim.asciidoc index d87cc86423798..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/ltrim.asciidoc +++ b/docs/reference/esql/functions/types/ltrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mod.asciidoc b/docs/reference/esql/functions/types/mod.asciidoc index c7bab205dc96f..79749dda1bc55 100644 --- a/docs/reference/esql/functions/types/mod.asciidoc +++ b/docs/reference/esql/functions/types/mod.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mul.asciidoc b/docs/reference/esql/functions/types/mul.asciidoc index 8bf1f721d316f..188dae5a50982 100644 --- a/docs/reference/esql/functions/types/mul.asciidoc +++ b/docs/reference/esql/functions/types/mul.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_avg.asciidoc b/docs/reference/esql/functions/types/mv_avg.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/mv_avg.asciidoc +++ b/docs/reference/esql/functions/types/mv_avg.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index 7ea15633bc180..e5c8f8eeb6bd7 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index b4ec6706b5008..8af6b76591acb 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index db2476b22d74b..705745d76dbab 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 0346d46de073c..e077c57971a4a 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 0346d46de073c..e077c57971a4a 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index e3301cb2c9abe..4e5f0a5e0ae89 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_median.asciidoc b/docs/reference/esql/functions/types/mv_median.asciidoc index 8e9bd02d381e9..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/mv_median.asciidoc +++ b/docs/reference/esql/functions/types/mv_median.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index e3301cb2c9abe..4e5f0a5e0ae89 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 49b5761f64fc8..568de10f53d32 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index 60056c5369bd1..24925ca8a6587 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_sum.asciidoc b/docs/reference/esql/functions/types/mv_sum.asciidoc index 8e9bd02d381e9..d81bbf36ae3fe 100644 --- a/docs/reference/esql/functions/types/mv_sum.asciidoc +++ b/docs/reference/esql/functions/types/mv_sum.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/mv_zip.asciidoc b/docs/reference/esql/functions/types/mv_zip.asciidoc index b7522019e454f..514041202a1d5 100644 --- a/docs/reference/esql/functions/types/mv_zip.asciidoc +++ b/docs/reference/esql/functions/types/mv_zip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. 
Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/neg.asciidoc b/docs/reference/esql/functions/types/neg.asciidoc index 7d378b0be53ce..28d3b2a512dec 100644 --- a/docs/reference/esql/functions/types/neg.asciidoc +++ b/docs/reference/esql/functions/types/neg.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index 38c2418bef536..f4da3fd215595 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/pi.asciidoc b/docs/reference/esql/functions/types/pi.asciidoc index 38679594f7733..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/pi.asciidoc +++ b/docs/reference/esql/functions/types/pi.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/pow.asciidoc b/docs/reference/esql/functions/types/pow.asciidoc index fb2f9309539c5..3753fd2317bc5 100644 --- a/docs/reference/esql/functions/types/pow.asciidoc +++ b/docs/reference/esql/functions/types/pow.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/replace.asciidoc b/docs/reference/esql/functions/types/replace.asciidoc index 57ffce246f395..3401c04bbe395 100644 --- a/docs/reference/esql/functions/types/replace.asciidoc +++ b/docs/reference/esql/functions/types/replace.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/right.asciidoc b/docs/reference/esql/functions/types/right.asciidoc index 728b5d188b32c..157f61b59316d 100644 --- a/docs/reference/esql/functions/types/right.asciidoc +++ b/docs/reference/esql/functions/types/right.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 3ff795ff1f5a2..ea9ab93825d4f 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/rtrim.asciidoc b/docs/reference/esql/functions/types/rtrim.asciidoc index d87cc86423798..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/rtrim.asciidoc +++ b/docs/reference/esql/functions/types/rtrim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/split.asciidoc b/docs/reference/esql/functions/types/split.asciidoc index 61c4546a54709..8a69a25bd0c48 100644 --- a/docs/reference/esql/functions/types/split.asciidoc +++ b/docs/reference/esql/functions/types/split.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/sqrt.asciidoc b/docs/reference/esql/functions/types/sqrt.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/sqrt.asciidoc +++ b/docs/reference/esql/functions/types/sqrt.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/st_intersects.asciidoc b/docs/reference/esql/functions/types/st_intersects.asciidoc index 15e3de732f146..36bd9cc036ade 100644 --- a/docs/reference/esql/functions/types/st_intersects.asciidoc +++ b/docs/reference/esql/functions/types/st_intersects.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/st_x.asciidoc b/docs/reference/esql/functions/types/st_x.asciidoc index 982ddf17ceaad..31cbd98042f1b 100644 --- a/docs/reference/esql/functions/types/st_x.asciidoc +++ b/docs/reference/esql/functions/types/st_x.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/st_y.asciidoc b/docs/reference/esql/functions/types/st_y.asciidoc index 982ddf17ceaad..31cbd98042f1b 100644 --- a/docs/reference/esql/functions/types/st_y.asciidoc +++ b/docs/reference/esql/functions/types/st_y.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/starts_with.asciidoc b/docs/reference/esql/functions/types/starts_with.asciidoc index 46d975723a43f..a6c77014966b4 100644 --- a/docs/reference/esql/functions/types/starts_with.asciidoc +++ b/docs/reference/esql/functions/types/starts_with.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/sub.asciidoc b/docs/reference/esql/functions/types/sub.asciidoc index f2600f6201e90..c439830b7d1e3 100644 --- a/docs/reference/esql/functions/types/sub.asciidoc +++ b/docs/reference/esql/functions/types/sub.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/substring.asciidoc b/docs/reference/esql/functions/types/substring.asciidoc index 784b8a7d48da8..7837c14b5a956 100644 --- a/docs/reference/esql/functions/types/substring.asciidoc +++ b/docs/reference/esql/functions/types/substring.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/tan.asciidoc b/docs/reference/esql/functions/types/tan.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/tau.asciidoc b/docs/reference/esql/functions/types/tau.asciidoc index 38679594f7733..50e9c47238e34 100644 --- a/docs/reference/esql/functions/types/tau.asciidoc +++ b/docs/reference/esql/functions/types/tau.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc index 389c96bbab0f8..2ab34243db67d 100644 --- a/docs/reference/esql/functions/types/to_boolean.asciidoc +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc index 04d994833e8b3..52339abb70512 100644 --- a/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianpoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc index 7b74e9c797575..bc42ddcb99221 100644 --- a/docs/reference/esql/functions/types/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/types/to_cartesianshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index a3fff3beeee20..52c4cebb661cf 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/to_degrees.asciidoc +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc index ea3c213f1e307..c78c3974af5a5 100644 --- a/docs/reference/esql/functions/types/to_double.asciidoc +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_geopoint.asciidoc b/docs/reference/esql/functions/types/to_geopoint.asciidoc index 9d79461e3c2c9..6b833c4cfeabd 100644 --- a/docs/reference/esql/functions/types/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/types/to_geopoint.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_geoshape.asciidoc b/docs/reference/esql/functions/types/to_geoshape.asciidoc index 0ba662944df80..98063e2766e88 100644 --- a/docs/reference/esql/functions/types/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/types/to_geoshape.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc index 6e1ce99b109e3..11fd7914c5b0f 100644 --- a/docs/reference/esql/functions/types/to_integer.asciidoc +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index 3c038215f824c..d2f94889b81ef 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index 9b4ba9f91b835..4bc927fd94697 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_lower.asciidoc b/docs/reference/esql/functions/types/to_lower.asciidoc index 8174a8b93bcd5..974066d225bca 100644 --- a/docs/reference/esql/functions/types/to_lower.asciidoc +++ b/docs/reference/esql/functions/types/to_lower.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc index 5172c78a3b96b..7cda278abdb56 100644 --- a/docs/reference/esql/functions/types/to_radians.asciidoc +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index c8b2cff97b4e8..f14cfbb39929f 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc index 51fa9e2022603..a271e1a19321d 100644 --- a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_upper.asciidoc b/docs/reference/esql/functions/types/to_upper.asciidoc index 8174a8b93bcd5..974066d225bca 100644 --- a/docs/reference/esql/functions/types/to_upper.asciidoc +++ b/docs/reference/esql/functions/types/to_upper.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
+// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/to_version.asciidoc b/docs/reference/esql/functions/types/to_version.asciidoc index fc6daace75c73..f95ea15d6e2b4 100644 --- a/docs/reference/esql/functions/types/to_version.asciidoc +++ b/docs/reference/esql/functions/types/to_version.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. *Supported types* diff --git a/docs/reference/esql/functions/types/trim.asciidoc b/docs/reference/esql/functions/types/trim.asciidoc index d87cc86423798..41d60049d59b8 100644 --- a/docs/reference/esql/functions/types/trim.asciidoc +++ b/docs/reference/esql/functions/types/trim.asciidoc @@ -1,4 +1,4 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
*Supported types* diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index f8f406b269a22..7d5dc206037cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -11,9 +11,12 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.logging.Logger; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xpack.esql.CsvTestUtils.ActualResults; import org.elasticsearch.xpack.versionfield.Version; +import org.hamcrest.Description; import org.hamcrest.Matchers; +import org.hamcrest.StringDescription; import java.util.ArrayList; import java.util.Comparator; @@ -23,6 +26,7 @@ import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults; @@ -184,7 +188,7 @@ public static void assertData( actualValues.sort(resultRowComparator(expected.columnTypes())); } var expectedValues = expected.values(); - ArrayList dataFailures = new ArrayList<>(); + List dataFailures = new ArrayList<>(); for (int row = 0; row < expectedValues.size(); row++) { try { @@ -230,7 +234,7 @@ public static void assertData( dataFailures.add(new DataFailure(row, column, transformedExpected, transformedActual)); } if (dataFailures.size() > 10) { - fail("Data mismatch: " + dataFailures); + dataFailure(dataFailures); } } @@ -247,7 +251,7 @@ public static void assertData( } } if (dataFailures.isEmpty() == false) { - fail("Data mismatch: " + dataFailures); + dataFailure(dataFailures); } if (expectedValues.size() < 
actualValues.size()) { fail( @@ -256,6 +260,19 @@ public static void assertData( } } + private static void dataFailure(List dataFailures) { + fail("Data mismatch:\n" + dataFailures.stream().map(f -> { + Description description = new StringDescription(); + ListMatcher expected = f.expected instanceof List + ? ListMatcher.matchesList().item(f.expected) + : ListMatcher.matchesList((List) f.expected); + List actualList = f.actual instanceof List ? List.of(f.actual) : (List) f.actual; + expected.describeMismatch(actualList, description); + String prefix = "row " + f.row + " column " + f.column + ":"; + return prefix + description.toString().replace("\n", "\n" + prefix); + }).collect(Collectors.joining("\n"))); + } + private static Comparator> resultRowComparator(List types) { return (x, y) -> { for (int i = 0; i < x.size(); i++) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1ca1a0e4a7b8a..332533bd63b02 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -3,11 +3,11 @@ metaFunctions#[skip:-8.13.99] meta functions; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false -acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Number between -1 and 1. If `null`, the function returns `null`." 
|double | "Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians." | false | false | false -asin |"double asin(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" | "Number between -1 and 1" |double | "Inverse sine trigonometric function." | false | false | false -atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "A number" |double | "Inverse tangent trigonometric function." | false | false | false -atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate", "x coordinate"] |double | "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." | [false, false] | false | false +abs |"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." |"double|integer|long|unsigned_long" | "Returns the absolute value." | false | false | false +acos |"double acos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians." | false | false | false +asin |"double asin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Number between -1 and 1. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians." 
| false | false | false +atan |"double atan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." |double |"Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians." | false | false | false +atan2 |"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["y coordinate. If `null`\, the function returns `null`.", "x coordinate. If `null`\, the function returns `null`."] |double | "The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians." | [false, false] | false | false auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." 
| false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 5a4ec6b9882fb..c1c1e72633d6a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,13 +22,18 @@ * Inverse cosine trigonometric function. 
*/ public class Asin extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Inverse sine trigonometric function.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input\n" + + "numeric expression as an angle, expressed in radians.", + examples = @Example(file = "floats", tag = "asin") + ) public Asin( Source source, @Param( name = "number", type = { "double", "integer", "long", "unsigned_long" }, - description = "Number between -1 and 1" + description = "Number between -1 and 1. If `null`, the function returns `null`." ) Expression n ) { super(source, n); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 362b497b50f61..6cd3d4b9ffb65 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,10 +22,19 @@ * Inverse cosine trigonometric function. 
*/ public class Atan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Inverse tangent trigonometric function.") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input\n" + + "numeric expression as an angle, expressed in radians.", + examples = @Example(file = "floats", tag = "atan") + ) public Atan( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "A number") Expression n + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 33a0f8bb167c5..47a17a90d2d7c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -34,12 +35,22 @@ public class Atan2 extends EsqlScalarFunction { @FunctionInfo( returnType = "double", - description = "The angle between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane." 
+ description = "The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the\n" + + "origin to the point (x , y) in the Cartesian plane, expressed in radians.", + examples = @Example(file = "floats", tag = "atan2") ) public Atan2( Source source, - @Param(name = "y_coordinate", type = { "double", "integer", "long", "unsigned_long" }, description = "y coordinate") Expression y, - @Param(name = "x_coordinate", type = { "double", "integer", "long", "unsigned_long" }, description = "x coordinate") Expression x + @Param( + name = "y_coordinate", + type = { "double", "integer", "long", "unsigned_long" }, + description = "y coordinate. If `null`, the function returns `null`." + ) Expression y, + @Param( + name = "x_coordinate", + type = { "double", "integer", "long", "unsigned_long" }, + description = "x coordinate. If `null`, the function returns `null`." + ) Expression x ) { super(source, List.of(y, x)); this.y = y; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index d58159bfd40e9..d4aba4bf47902 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1110,6 +1110,9 @@ public static void renderDocs() throws IOException { LogManager.getLogger(getTestClass()).info("Skipping rendering types because the function '" + name + "' isn't registered"); } + private static final String DOCS_WARNING = + "// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.\n\n"; + private static void renderTypes(List argNames) throws IOException { StringBuilder header = new StringBuilder(); for (String arg : argNames) { @@ -1131,9 +1134,7 @@ private static void renderTypes(List argNames) throws IOException { } Collections.sort(table); - String rendered = """ - // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. - + String rendered = DOCS_WARNING + """ *Supported types* [%header.monospaced.styled,format=dsv,separator=|] @@ -1155,9 +1156,7 @@ private static void renderParametersList(List argNames, List arg } private static void renderDescription(String description) throws IOException { - String rendered = """ - // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. - + String rendered = DOCS_WARNING + """ *Description* """ + description + "\n"; @@ -1170,12 +1169,12 @@ private static boolean renderExamples(FunctionInfo info) throws IOException { return false; } StringBuilder builder = new StringBuilder(); - builder.append(""" - // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. - - *Example$S$* - - """.replace("$S$", info.examples().length == 1 ? "" : "s")); + builder.append(DOCS_WARNING); + if (info.examples().length == 1) { + builder.append("*Example*\n\n"); + } else { + builder.append("*Examples*\n\n"); + } for (Example example : info.examples()) { builder.append(""" [source.merge.styled,esql] @@ -1196,9 +1195,7 @@ private static boolean renderExamples(FunctionInfo info) throws IOException { } private static void renderFullLayout(String name, boolean hasExamples) throws IOException { - String rendered = """ - // This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
- + String rendered = DOCS_WARNING + """ [discrete] [[esql-$NAME$]] === `$UPPER_NAME$` From bce11c6c1c1c73a4b9a57b367cc6d379b9acac1b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 21 Mar 2024 08:24:32 -0700 Subject: [PATCH 096/214] Avoid using small inactive exchange timeout in breaker tests (#106394) The tests failed because we set an inactive exchange timeout too short, causing a timeout error instead of triggering the expected CircuitBreakingException. Closes #105681 Closes #105543 --- .../compute/operator/exchange/ExchangeService.java | 6 +++++- .../xpack/esql/action/EsqlActionBreakerIT.java | 4 +--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index a8afce1a3b223..9af08346256f7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -25,7 +25,9 @@ import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; @@ -178,13 +180,15 @@ public void messageReceived(OpenExchangeRequest request, TransportChannel channe private class ExchangeTransportAction implements TransportRequestHandler { @Override - public void messageReceived(ExchangeRequest request, TransportChannel channel, Task task) { + public void messageReceived(ExchangeRequest request, TransportChannel channel, Task transportTask) { final 
String exchangeId = request.exchangeId(); ActionListener listener = new ChannelActionListener<>(channel); final ExchangeSinkHandler sinkHandler = sinks.get(exchangeId); if (sinkHandler == null) { listener.onResponse(new ExchangeResponse(blockFactory, null, true)); } else { + CancellableTask task = (CancellableTask) transportTask; + task.addListener(() -> sinkHandler.onFailure(new TaskCancelledException(task.getReasonCancelled()))); sinkHandler.fetchPageAsync(request.sourcesFinished(), listener); } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 85eb0c02625ad..059ed672e56c6 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.action; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.DocWriteResponse; @@ -35,7 +34,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105543") @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class EsqlActionBreakerIT extends EsqlActionIT { @@ -72,7 +70,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) - .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + 
.put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueSeconds(between(5, 10))) .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from network can trip the circuit breaker From de7e8980d153ab2633e6ed97c533e93dcd8dadb2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 21 Mar 2024 16:41:31 +0100 Subject: [PATCH 097/214] [DOCS] Amends important note on delayed data detection. (#106610) --- .../ml-delayed-data-detection.asciidoc | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc index f55ab207a2689..b904508d20f3d 100644 --- a/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-delayed-data-detection.asciidoc @@ -54,19 +54,17 @@ image::images/ml-annotations.png["Delayed data annotations in the Single Metric [IMPORTANT] ==== -As the `doc_count` from an aggregation is compared with the -bucket results of the job, the delayed data check will not work correctly in the -following cases: +The delayed data check will not work correctly in the following cases: -* if the datafeed uses aggregations and the job's `analysis_config` does not have its -`summary_count_field_name` set to `doc_count`, -* if the datafeed is _not_ using aggregations and `summary_count_field_name` is set to -any value. +* if the {dfeed} uses aggregations that filter data, +* if the {dfeed} uses aggregations and the job's `analysis_config` does not have +its `summary_count_field_name` set to `doc_count`, +* if the {dfeed} is _not_ using aggregations and `summary_count_field_name` is +set to any value. 
-If the datafeed is using aggregations then it's highly likely that the job's -`summary_count_field_name` should be set to `doc_count`. If -`summary_count_field_name` is set to any value other than `doc_count`, the -delayed data check for the datafeed must be disabled. +If the datafeed is using aggregations, set the job's `summary_count_field_name` +to `doc_count`. If `summary_count_field_name` is set to any value other than +`doc_count`, the delayed data check for the datafeed must be disabled. ==== There is another tool for visualizing the delayed data on the *Annotations* tab in the {anomaly-detect} job management page: From 521e06d0121726ea540f1ed5c1c4091c1ef5392b Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Mar 2024 09:00:53 -0700 Subject: [PATCH 098/214] Remove node shutdown test check on snapshot build (#106518) Node shutdown is no longer guarded by a feature flag. This commit removes the test assumptions that only allowed some tests to run on snapshots which enable feature flags. 
--- .../elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index e2bcf10325fd6..fad05e6f213d5 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.shutdown; -import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -51,7 +50,6 @@ protected Collection> nodePlugins() { * reverting to `NOT_STARTED` (this was a bug in the initial implementation). */ public void testShardStatusStaysCompleteAfterNodeLeaves() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToRestartName = internalCluster().startNode(); final String nodeToRestartId = getNodeId(nodeToRestartName); internalCluster().startNode(); @@ -71,7 +69,6 @@ public void testShardStatusStaysCompleteAfterNodeLeaves() throws Exception { * registered. This may happen if {@link NodeSeenService} isn't working as expected. 
*/ public void testShardStatusStaysCompleteAfterNodeLeavesIfRegisteredWhileNodeOffline() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToRestartName = internalCluster().startNode(); final String nodeToRestartId = getNodeId(nodeToRestartName); internalCluster().startNode(); @@ -99,7 +96,6 @@ public Settings onNodeStopped(String nodeName) throws Exception { * (this was a bug in the initial implementation). */ public void testShardStatusIsCompleteOnNonDataNodes() throws Exception { - assumeTrue("must be on a snapshot build of ES to run in order for the feature flag to be set", Build.current().isSnapshot()); final String nodeToShutDownName = internalCluster().startMasterOnlyNode(); internalCluster().startMasterOnlyNode(); // Just to have at least one other node final String nodeToRestartId = getNodeId(nodeToShutDownName); From 76d19749a41bc940db3d697ed89218ad32cbadda Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 21 Mar 2024 17:29:38 +0100 Subject: [PATCH 099/214] Add gradle plugin for publishing docker based test fixtures (#106229) To make those docker images available for our testcontainer based fixtures we need to publish them on a regular basis to ensure the source and the images are in sync. 
This adds some convenience plugin to take care of publishing our docker test fixtures --- build-tools-internal/build.gradle | 4 ++ .../testfixtures/TestFixtureDeployment.java | 37 ++++++++++++ .../TestFixturesDeployPlugin.java | 60 +++++++++++++++++++ test/fixtures/s3-fixture/build.gradle | 1 - x-pack/test/idp-fixture/build.gradle | 41 +++++-------- 5 files changed, 115 insertions(+), 28 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 24647c366c459..52e72d973f2ed 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -179,6 +179,10 @@ gradlePlugin { id = 'elasticsearch.test.fixtures' implementationClass = 'org.elasticsearch.gradle.internal.testfixtures.TestFixturesPlugin' } + deployTestFixtures { + id = 'elasticsearch.deploy-test-fixtures' + implementationClass = 'org.elasticsearch.gradle.internal.testfixtures.TestFixturesDeployPlugin' + } testBase { id = 'elasticsearch.test-base' implementationClass = 'org.elasticsearch.gradle.internal.ElasticsearchTestBasePlugin' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java new file mode 100644 index 0000000000000..ee6f70db2b788 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureDeployment.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.testfixtures; + +import org.gradle.api.Named; +import org.gradle.api.provider.ListProperty; +import org.gradle.api.provider.Property; + +import java.io.File; + +public abstract class TestFixtureDeployment implements Named { + + private final String name; + + public TestFixtureDeployment(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + public abstract Property getDockerRegistry(); + + public abstract Property getDockerContext(); + + public abstract Property getVersion(); + + public abstract ListProperty getBaseImages(); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java new file mode 100644 index 0000000000000..362ced9c3234e --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.testfixtures; + +import org.apache.commons.lang.StringUtils; +import org.elasticsearch.gradle.Architecture; +import org.elasticsearch.gradle.internal.docker.DockerBuildTask; +import org.elasticsearch.gradle.internal.info.BuildParams; +import org.gradle.api.NamedDomainObjectContainer; +import org.gradle.api.Plugin; +import org.gradle.api.Project; + +import java.util.Arrays; +import java.util.List; + +public class TestFixturesDeployPlugin implements Plugin { + + public static final String DEPLOY_FIXTURE_TASK_NAME = "deployFixtureDockerImages"; + private static String DEFAULT_DOCKER_REGISTRY = "docker.elastic.co/elasticsearch-dev"; + + @Override + public void apply(Project project) { + NamedDomainObjectContainer fixtures = project.container(TestFixtureDeployment.class); + project.getExtensions().add("dockerFixtures", fixtures); + registerDeployTaskPerFixture(project, fixtures); + project.getTasks().register(DEPLOY_FIXTURE_TASK_NAME, task -> task.dependsOn(project.getTasks().withType(DockerBuildTask.class))); + } + + private static void registerDeployTaskPerFixture(Project project, NamedDomainObjectContainer fixtures) { + fixtures.all( + fixture -> project.getTasks() + .register("deploy" + StringUtils.capitalize(fixture.getName()) + "DockerImage", DockerBuildTask.class, task -> { + task.getDockerContext().fileValue(fixture.getDockerContext().get()); + List baseImages = fixture.getBaseImages().get(); + if (baseImages.isEmpty() == false) { + task.setBaseImages(baseImages.toArray(new String[baseImages.size()])); + } + task.setNoCache(BuildParams.isCi()); + task.setTags( + new String[] { + resolveTargetDockerRegistry(fixture) + "/" + fixture.getName() + "-fixture:" + fixture.getVersion().get() } + ); + task.getPush().set(BuildParams.isCi()); + task.getPlatforms().addAll(Arrays.stream(Architecture.values()).map(a -> a.dockerPlatform).toList()); + task.setGroup("Deploy TestFixtures"); + task.setDescription("Deploys the " + 
fixture.getName() + " test fixture"); + }) + ); + } + + private static String resolveTargetDockerRegistry(TestFixtureDeployment fixture) { + return fixture.getDockerRegistry().getOrElse(DEFAULT_DOCKER_REGISTRY); + } +} diff --git a/test/fixtures/s3-fixture/build.gradle b/test/fixtures/s3-fixture/build.gradle index 0f031ee029f75..fc044c6dc6aa6 100644 --- a/test/fixtures/s3-fixture/build.gradle +++ b/test/fixtures/s3-fixture/build.gradle @@ -8,7 +8,6 @@ apply plugin: 'elasticsearch.java' description = 'Fixture for S3 Storage service' -//tasks.named("test").configure { enabled = false } dependencies { api project(':server') diff --git a/x-pack/test/idp-fixture/build.gradle b/x-pack/test/idp-fixture/build.gradle index 2ef03bf7747cc..4ef8eee8d08a7 100644 --- a/x-pack/test/idp-fixture/build.gradle +++ b/x-pack/test/idp-fixture/build.gradle @@ -1,35 +1,22 @@ -import org.elasticsearch.gradle.Architecture -import org.elasticsearch.gradle.internal.docker.DockerBuildTask -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.cache-test-fixtures' +apply plugin: 'elasticsearch.deploy-test-fixtures' + +dockerFixtures { + idp { + dockerContext = file("src/main/resources/idp") + version = "1.1" + baseImages = ["openjdk:11.0.16-jre"] + } + openldap { + dockerContext = file("src/main/resources/openldap") + version = "1.0" + baseImages = ["osixia/openldap:1.4.0"] + } +} dependencies { testImplementation project(':test:framework') - api project(':test:fixtures:testcontainer-utils') api "junit:junit:${versions.junit}" } - -tasks.withType(DockerBuildTask).configureEach { - noCache = BuildParams.isCi() - push = true //BuildParams.isCi() - getPlatforms().addAll( Architecture.values().collect{ it.dockerPlatform } ) -} - -tasks.register("deployIdpFixtureDockerImages", DockerBuildTask) { - dockerContext.fileValue(file("src/main/resources/idp")) - baseImages = ["openjdk:11.0.16-jre"] - tags = 
["docker.elastic.co/elasticsearch-dev/idp-fixture:1.1"] -} - -tasks.register("deployOpenLdapFixtureDockerImages", DockerBuildTask) { - dockerContext.fileValue(file("src/main/resources/openldap")) - baseImages = ["osixia/openldap:1.4.0"] - tags = ["docker.elastic.co/elasticsearch-dev/openldap-fixture:1.0"] -} - -tasks.register("deployFixtureDockerImages") { - dependsOn tasks.withType(DockerBuildTask) -} From a8586f31bd83411bc1934d5c31b58da05d02dca6 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 21 Mar 2024 16:31:10 +0000 Subject: [PATCH 100/214] Document lack of logging BwC (#104459) Our guarantees around backwards compatibility and breaking changes do not apply to the Elasticsearch application logs. This commit adds a note to the docs about this. --- docs/reference/setup/logging-config.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc index 69fa086d67673..f6b0ceb2d6ae6 100644 --- a/docs/reference/setup/logging-config.asciidoc +++ b/docs/reference/setup/logging-config.asciidoc @@ -204,6 +204,13 @@ configuration for other reasons. For example, you may want to send logs for a particular logger to another file. However, these use cases are rare. -- +IMPORTANT: {es}'s application logs are intended for humans to read and +interpret. Different versions of {es} may report information in these logs in +different ways, perhaps adding extra detail, removing unnecessary information, +formatting the same information in different ways, renaming the logger or +adjusting the log level for specific messages. Do not rely on the contents of +the application logs remaining precisely the same between versions. 
+ [discrete] [[deprecation-logging]] === Deprecation logging From 93a21e1b14c6ca611b477360c7c7f65846bd364e Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 21 Mar 2024 16:30:18 +0000 Subject: [PATCH 101/214] AwaitsFix for #106618 --- .../test/java/org/elasticsearch/threadpool/ThreadPoolTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index c6f3d752d103c..8a61054f5fa93 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -465,6 +465,7 @@ public void testScheduledOneShotForceExecution() { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106618") public void testScheduledFixedDelayRejection() { final var name = "fixed-bounded"; final var threadPool = new TestThreadPool( From fa00e6176f8723eb60525e064bb668523e6bbaab Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Mar 2024 12:52:04 -0400 Subject: [PATCH 102/214] ESQL: Values aggregation function (#106065) This creates the `VALUES` aggregation function which buffers all field values it receives and emits them as a multivalued field. It can use a significant amount of memory and will circuit break if it uses too much memory, but it's really useful for putting together self-join-like behavior. It sort of functions as a stop-gap measure until we have more self-join style things. In the future we'll have spill-to-disk for aggregations and, likely, some kind of self-join command for aggregations at least so this will be able to grow beyond memory. But for now, memory it is. 
Example: ``` FROM employees | EVAL first_letter = SUBSTRING(first_name, 0, 1) | STATS first_name=VALUES(first_name) BY first_letter | SORT first_letter ; first_name:keyword | first_letter:keyword [Anneke, Alejandro, Anoosh, Amabile, Arumugam] | A [Bezalel, Berni, Bojan, Basil, Brendon, Berhard, Breannda] | B [Chirstian, Cristinel, Claudi, Charlene] | C [Duangkaew, Divier, Domenick, Danel] | D ``` I made this work for everything but `geo_point` and `cartesian_point` because I'm not 100% sure how to integrate with those. We can grab those in a follow up. Closes #103600 --- docs/changelog/106065.yaml | 6 + .../functions/aggregation-functions.asciidoc | 2 + docs/reference/esql/functions/values.asciidoc | 38 +++ .../compute/ann/IntermediateState.java | 15 +- x-pack/plugin/esql/compute/build.gradle | 21 ++ .../compute/gen/AggregatorImplementer.java | 69 ++-- .../gen/GroupingAggregatorImplementer.java | 32 +- .../aggregation/ValuesBytesRefAggregator.java | 190 +++++++++++ .../aggregation/ValuesDoubleAggregator.java | 180 +++++++++++ .../aggregation/ValuesIntAggregator.java | 187 +++++++++++ .../aggregation/ValuesLongAggregator.java | 180 +++++++++++ ...inctBooleanGroupingAggregatorFunction.java | 12 +- ...nctBytesRefGroupingAggregatorFunction.java | 6 +- ...tinctDoubleGroupingAggregatorFunction.java | 6 +- ...DistinctIntGroupingAggregatorFunction.java | 6 +- ...istinctLongGroupingAggregatorFunction.java | 6 +- .../MaxDoubleGroupingAggregatorFunction.java | 12 +- .../MaxIntGroupingAggregatorFunction.java | 12 +- .../MaxLongGroupingAggregatorFunction.java | 12 +- ...ationDoubleGroupingAggregatorFunction.java | 6 +- ...eviationIntGroupingAggregatorFunction.java | 6 +- ...viationLongGroupingAggregatorFunction.java | 6 +- .../MinDoubleGroupingAggregatorFunction.java | 12 +- .../MinIntGroupingAggregatorFunction.java | 12 +- .../MinLongGroupingAggregatorFunction.java | 12 +- ...ntileDoubleGroupingAggregatorFunction.java | 6 +- ...rcentileIntGroupingAggregatorFunction.java | 6 +- 
...centileLongGroupingAggregatorFunction.java | 6 +- .../SumDoubleGroupingAggregatorFunction.java | 18 +- .../SumIntGroupingAggregatorFunction.java | 12 +- .../SumLongGroupingAggregatorFunction.java | 12 +- .../ValuesBooleanAggregatorFunction.java | 120 +++++++ ...luesBooleanAggregatorFunctionSupplier.java | 38 +++ ...luesBooleanGroupingAggregatorFunction.java | 195 +++++++++++ .../ValuesBytesRefAggregatorFunction.java | 124 +++++++ ...uesBytesRefAggregatorFunctionSupplier.java | 38 +++ ...uesBytesRefGroupingAggregatorFunction.java | 201 ++++++++++++ .../ValuesDoubleAggregatorFunction.java | 120 +++++++ ...aluesDoubleAggregatorFunctionSupplier.java | 38 +++ ...aluesDoubleGroupingAggregatorFunction.java | 195 +++++++++++ .../ValuesIntAggregatorFunction.java | 120 +++++++ .../ValuesIntAggregatorFunctionSupplier.java | 38 +++ .../ValuesIntGroupingAggregatorFunction.java | 193 +++++++++++ .../ValuesLongAggregatorFunction.java | 120 +++++++ .../ValuesLongAggregatorFunctionSupplier.java | 38 +++ .../ValuesLongGroupingAggregatorFunction.java | 195 +++++++++++ ...ntDocValuesGroupingAggregatorFunction.java | 30 +- ...ourceValuesGroupingAggregatorFunction.java | 30 +- ...ntDocValuesGroupingAggregatorFunction.java | 30 +- ...ourceValuesGroupingAggregatorFunction.java | 30 +- .../aggregation/ValuesBooleanAggregator.java | 167 ++++++++++ .../aggregation/X-ValuesAggregator.java.st | 306 ++++++++++++++++++ .../ValuesDoubleAggregatorFunctionTests.java | 43 +++ ...DoubleGroupingAggregatorFunctionTests.java | 59 ++++ .../ValuesIntAggregatorFunctionTests.java | 43 +++ ...uesIntGroupingAggregatorFunctionTests.java | 55 ++++ .../ValuesLongAggregatorFunctionTests.java | 43 +++ ...esLongGroupingAggregatorFunctionTests.java | 55 ++++ .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 3 +- .../xpack/esql/ccq/MultiClusterSpecIT.java | 31 +- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 2 +- .../src/main/resources/boolean.csv-spec | 57 ++++ .../src/main/resources/floats.csv-spec | 57 ++++ 
.../src/main/resources/ints.csv-spec | 171 ++++++++++ .../src/main/resources/ip.csv-spec | 42 +++ .../src/main/resources/meta.csv-spec | 4 +- .../src/main/resources/string.csv-spec | 52 +++ .../src/main/resources/version.csv-spec | 60 ++++ .../esql/expression/EsqlTypeResolutions.java | 12 + .../function/EsqlFunctionRegistry.java | 4 +- .../expression/function/aggregate/Values.java | 86 +++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 5 +- .../xpack/esql/planner/AggregateMapper.java | 7 +- .../xpack/esql/plugin/EsqlFeatures.java | 9 +- .../qa/native-multi-node-tests/build.gradle | 1 + .../ml/integration/MlNativeIntegTestCase.java | 7 +- .../rest-api-spec/test/esql/130_spatial.yml | 8 + .../rest-api-spec/test/esql/80_text.yml | 36 +++ 78 files changed, 4313 insertions(+), 111 deletions(-) create mode 100644 docs/changelog/106065.yaml create mode 100644 docs/reference/esql/functions/values.asciidoc create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st create mode 100644 
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java diff --git a/docs/changelog/106065.yaml b/docs/changelog/106065.yaml new file mode 100644 index 0000000000000..b87f4848fb574 --- /dev/null +++ b/docs/changelog/106065.yaml @@ -0,0 +1,6 @@ +pr: 106065 +summary: "ESQL: Values aggregation function" +area: ES|QL +type: feature +issues: + - 103600 diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 91293728fd45c..373b1c140a896 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -18,6 +18,7 @@ The <> function supports these aggregate functions: * <> * <> * <> +* <> // end::agg_list[] include::avg.asciidoc[] @@ -30,3 +31,4 @@ include::min.asciidoc[] include::percentile.asciidoc[] include::st_centroid.asciidoc[] include::sum.asciidoc[] +include::values.asciidoc[] diff --git a/docs/reference/esql/functions/values.asciidoc b/docs/reference/esql/functions/values.asciidoc new file mode 100644 index 
0000000000000..9707180058e24 --- /dev/null +++ b/docs/reference/esql/functions/values.asciidoc @@ -0,0 +1,38 @@ +[discrete] +[[esql-agg-values]] +=== `VALUES` + +preview::["Do not use `VALUES` on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +*Syntax* + +[source,esql] +---- +VALUES(expression) +---- + +`expression`:: +Expression of any type except `geo_point`, `cartesian_point`, or `geo_shape`. + +*Description* + +Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. +If you need the values returned in order use <>. + +WARNING: This can use a significant amount of memory and ES|QL doesn't yet + grow aggregations beyond memory. So this aggregation will work until + it is used to collect more values than can fit into memory. Once it + collects too many values it will fail the query with + a <>. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/string.csv-spec[tag=values-grouped] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/string.csv-spec[tag=values-grouped-result] +|=== + diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java index 54a5caa05d149..3781fc6290365 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/IntermediateState.java @@ -7,9 +7,22 @@ package org.elasticsearch.compute.ann; +/** + * Description of a column of data that makes up the intermediate state of + * an aggregation. 
+ */ public @interface IntermediateState { - + /** + * Name of the column. + */ String name(); + /** + * Type of the column. This should be the name of an element type or + * an element type followed by {@code _BLOCK}. If this ends in {@code _BLOCK} + * then the aggregation will the {@code Block} as an argument to + * {@code combineIntermediate} and a position. It's the aggregation's + * responsibility to iterate values from the block as needed. + */ String type(); } diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 532fd51a42437..e5d076aa0e041 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -365,6 +365,27 @@ tasks.named('stringTemplates').configure { it.inputFile = arrayStateInputFile it.outputFile = "org/elasticsearch/compute/aggregation/DoubleArrayState.java" } + File valuesAggregatorInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesLongAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java" + } + template { + it.properties = bytesRefProperties + it.inputFile = valuesAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java" + } File multivalueDedupeInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st") template { it.properties = intProperties diff --git 
a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 1d74416a81894..d3fe51b4cc225 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -108,11 +108,9 @@ public AggregatorImplementer(Elements elements, TypeElement declarationType, Int (declarationType.getSimpleName() + "AggregatorFunction").replace("AggregatorAggregator", "Aggregator") ); this.valuesIsBytesRef = BYTES_REF.equals(TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType())); - intermediateState = Arrays.stream(interStateAnno).map(state -> new IntermediateStateDesc(state.name(), state.type())).toList(); + intermediateState = Arrays.stream(interStateAnno).map(IntermediateStateDesc::newIntermediateStateDesc).toList(); } - record IntermediateStateDesc(String name, String elementType) {} - ClassName implementation() { return implementation; } @@ -229,7 +227,7 @@ private MethodSpec create() { for (Parameter p : createParameters) { builder.addParameter(p.type(), p.name()); } - if (init.getParameters().isEmpty()) { + if (createParameters.isEmpty()) { builder.addStatement("return new $T(driverContext, channels, $L)", implementation, callInit()); } else { builder.addStatement( @@ -410,20 +408,7 @@ private MethodSpec addIntermediateInput() { builder.addStatement("assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size()"); for (int i = 0; i < intermediateState.size(); i++) { var interState = intermediateState.get(i); - ClassName blockType = blockType(interState.elementType()); - builder.addStatement("Block $L = page.getBlock(channels.get($L))", interState.name + "Uncast", i); - builder.beginControlFlow("if ($L.areAllValuesNull())", 
interState.name + "Uncast"); - { - builder.addStatement("return"); - builder.endControlFlow(); - } - builder.addStatement( - "$T $L = (($T) $L).asVector()", - vectorType(interState.elementType()), - interState.name(), - blockType, - interState.name() + "Uncast" - ); + interState.assignToVariable(builder, i); builder.addStatement("assert $L.getPositionCount() == 1", interState.name()); } if (combineIntermediate != null) { @@ -449,15 +434,7 @@ private MethodSpec addIntermediateInput() { } String intermediateStateRowAccess() { - return intermediateState.stream().map(AggregatorImplementer::vectorAccess).collect(joining(", ")); - } - - static String vectorAccess(IntermediateStateDesc isd) { - String s = isd.name() + "." + vectorAccessorName(isd.elementType()) + "(0"; - if (isd.elementType().equals("BYTES_REF")) { - s += ", scratch"; - } - return s + ")"; + return intermediateState.stream().map(desc -> desc.access("0")).collect(joining(", ")); } private String primitiveStateMethod() { @@ -548,4 +525,42 @@ private boolean hasPrimitiveState() { default -> false; }; } + + record IntermediateStateDesc(String name, String elementType, boolean block) { + static IntermediateStateDesc newIntermediateStateDesc(IntermediateState state) { + String type = state.type(); + boolean block = false; + if (type.toUpperCase(Locale.ROOT).endsWith("_BLOCK")) { + type = type.substring(0, type.length() - "_BLOCK".length()); + block = true; + } + return new IntermediateStateDesc(state.name(), type, block); + } + + public String access(String position) { + if (block) { + return name(); + } + String s = name() + "." 
+ vectorAccessorName(elementType()) + "(" + position; + if (elementType().equals("BYTES_REF")) { + s += ", scratch"; + } + return s + ")"; + } + + public void assignToVariable(MethodSpec.Builder builder, int offset) { + builder.addStatement("Block $L = page.getBlock(channels.get($L))", name + "Uncast", offset); + ClassName blockType = blockType(elementType()); + builder.beginControlFlow("if ($L.areAllValuesNull())", name + "Uncast"); + { + builder.addStatement("return"); + builder.endControlFlow(); + } + if (block) { + builder.addStatement("$T $L = ($T) $L", blockType, name, blockType, name + "Uncast"); + } else { + builder.addStatement("$T $L = (($T) $L).asVector()", vectorType(elementType), name, blockType, name + "Uncast"); + } + } + } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index 12e5de9fef591..cc55e19b7d421 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -49,8 +49,6 @@ import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; -import static org.elasticsearch.compute.gen.Types.blockType; -import static org.elasticsearch.compute.gen.Types.vectorType; /** * Implements "GroupingAggregationFunction" from a class containing static methods @@ -72,7 +70,7 @@ public class GroupingAggregatorImplementer { private final boolean valuesIsBytesRef; private final List createParameters; private final ClassName implementation; - private final List intermediateState; + private final List intermediateState; public GroupingAggregatorImplementer(Elements elements, 
TypeElement declarationType, IntermediateState[] interStateAnno) { this.declarationType = declarationType; @@ -102,11 +100,11 @@ public GroupingAggregatorImplementer(Elements elements, TypeElement declarationT (declarationType.getSimpleName() + "GroupingAggregatorFunction").replace("AggregatorGroupingAggregator", "GroupingAggregator") ); - intermediateState = Arrays.stream(interStateAnno).map(state -> new IntermediateStateDesc(state.name(), state.type())).toList(); + intermediateState = Arrays.stream(interStateAnno) + .map(AggregatorImplementer.IntermediateStateDesc::newIntermediateStateDesc) + .toList(); } - record IntermediateStateDesc(String name, String elementType) {} - public ClassName implementation() { return implementation; } @@ -421,11 +419,7 @@ private MethodSpec addIntermediateInput() { builder.addStatement("assert channels.size() == intermediateBlockCount()"); int count = 0; for (var interState : intermediateState) { - builder.addStatement( - "$T " + interState.name() + " = page.<$T>getBlock(channels.get(" + count + ")).asVector()", - vectorType(interState.elementType()), - blockType(interState.elementType()) - ); + interState.assignToVariable(builder, count); count++; } final String first = intermediateState.get(0).name(); @@ -433,13 +427,13 @@ private MethodSpec addIntermediateInput() { builder.addStatement( "assert " + intermediateState.stream() - .map(IntermediateStateDesc::name) + .map(AggregatorImplementer.IntermediateStateDesc::name) .skip(1) .map(s -> first + ".getPositionCount() == " + s + ".getPositionCount()") .collect(joining(" && ")) ); } - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + if (intermediateState.stream().map(AggregatorImplementer.IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); } builder.beginControlFlow("for (int groupPosition = 0; groupPosition < 
groups.getPositionCount(); groupPosition++)"); @@ -469,15 +463,11 @@ private MethodSpec addIntermediateInput() { } String intermediateStateRowAccess() { - return intermediateState.stream().map(GroupingAggregatorImplementer::vectorAccess).collect(joining(", ")); - } - - static String vectorAccess(IntermediateStateDesc isd) { - String s = isd.name() + "." + vectorAccessorName(isd.elementType()) + "(groupPosition + positionOffset"; - if (isd.elementType().equals("BYTES_REF")) { - s += ", scratch"; + String rowAccess = intermediateState.stream().map(desc -> desc.access("groupPosition + positionOffset")).collect(joining(", ")); + if (intermediateState.stream().anyMatch(AggregatorImplementer.IntermediateStateDesc::block)) { + rowAccess += ", groupPosition + positionOffset"; } - return s + ")"; + return rowAccess; } private void combineStates(MethodSpec.Builder builder) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java new file mode 100644 index 0000000000000..736b320a9dde8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * Aggregates field values for BytesRef. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. + */ +@Aggregator({ @IntermediateState(name = "values", type = "BYTES_REF_BLOCK") }) +@GroupingAggregator +class ValuesBytesRefAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, BytesRef v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getBytesRef(i, scratch)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, BytesRef v) { + state.values.add(groupId, 
BlockHash.hashOrdToGroup(state.bytes.add(v))); + } + + public static void combineIntermediate(GroupingState state, int groupId, BytesRefBlock values, int valuesPosition) { + BytesRef scratch = new BytesRef(); + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getBytesRef(i, scratch)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + BytesRef scratch = new BytesRef(); + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); + combine(current, currentGroupId, state.bytes.get(value, scratch)); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final BytesRefHash values; + + private SingleState(BigArrays bigArrays) { + values = new BytesRefHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + BytesRef scratch = new BytesRef(); + if (values.size() == 1) { + return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); + } + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendBytesRef(values.get(id, scratch)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped 
{@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. + */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + private final BytesRefHash bytes; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + bytes = new BytesRefHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + BytesRef scratch = new BytesRef(); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. 
+ */ + int count = 0; + long first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendBytesRef(bytes.get(first, scratch)); + builder.appendBytesRef(bytes.get(value, scratch)); + } + default -> builder.appendBytesRef(bytes.get(value, scratch)); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendBytesRef(bytes.get(first, scratch)); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + Releasables.closeExpectNoException(values, bytes); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java new file mode 100644 index 0000000000000..a8409367bc090 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesDoubleAggregator.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for double. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. + */ +@Aggregator({ @IntermediateState(name = "values", type = "DOUBLE_BLOCK") }) +@GroupingAggregator +class ValuesDoubleAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, double v) { + state.values.add(Double.doubleToLongBits(v)); + } + + public static void combineIntermediate(SingleState state, DoubleBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getDouble(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, double v) { + state.values.add(groupId, Double.doubleToLongBits(v)); + } + + public static void combineIntermediate(GroupingState state, int groupId, DoubleBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + 
int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getDouble(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + double value = Double.longBitsToDouble(state.values.getKey2(id)); + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantDoubleBlockWith(Double.longBitsToDouble(values.get(0)), 1); + } + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendDouble(Double.longBitsToDouble(values.get(id))); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
+ */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + double first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + double value = Double.longBitsToDouble(values.getKey2(id)); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendDouble(first); + builder.appendDouble(value); + } + default -> builder.appendDouble(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendDouble(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java new file mode 100644 index 
0000000000000..2420dcee70712 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesIntAggregator.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for int. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "INT_BLOCK") }) +@GroupingAggregator +class ValuesIntAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, int v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, IntBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getInt(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, int v) { + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. 
+ */ + state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); + } + + public static void combineIntermediate(GroupingState state, int groupId, IntBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getInt(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + long both = state.values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == statePosition) { + int value = (int) both; + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantIntBlockWith((int) values.get(0), 1); + } + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendInt((int) values.get(id)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. 
This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. + */ + public static class GroupingState implements Releasable { + private final LongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. 
+ */ + int count = 0; + int first = 0; + for (int id = 0; id < values.size(); id++) { + long both = values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == selectedGroup) { + int value = (int) both; + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendInt(first); + builder.appendInt(value); + } + default -> builder.appendInt(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendInt(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java new file mode 100644 index 0000000000000..4938b8f15edb0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesLongAggregator.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.LongLongHash; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; + +/** + * Aggregates field values for long. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. + */ +@Aggregator({ @IntermediateState(name = "values", type = "LONG_BLOCK") }) +@GroupingAggregator +class ValuesLongAggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, long v) { + state.values.add(v); + } + + public static void combineIntermediate(SingleState state, LongBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getLong(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, long v) { + state.values.add(groupId, v); + } + + public static void combineIntermediate(GroupingState state, int groupId, LongBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i 
= start; i < end; i++) { + combine(state, groupId, values.getLong(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + for (int id = 0; id < state.values.size(); id++) { + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); + combine(current, currentGroupId, value); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private final LongHash values; + + private SingleState(BigArrays bigArrays) { + values = new LongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } + if (values.size() == 1) { + return blockFactory.newConstantLongBlockWith(values.get(0), 1); + } + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { + builder.appendLong(values.get(id)); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for collection to support a {@code O(1)} + * collector operation. But at least it's fairly simple. 
+ */ + public static class GroupingState implements Releasable { + private final LongLongHash values; + + private GroupingState(BigArrays bigArrays) { + values = new LongLongHash(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. + */ + int count = 0; + long first = 0; + for (int id = 0; id < values.size(); id++) { + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.appendLong(first); + builder.appendLong(value); + } + default -> builder.appendLong(value); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendLong(first); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { + values.close(); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index eb618f4569ce7..a12677e70e8a9 
100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private void addRawInput(int positionOffset, IntBlock groups, BooleanVector valu public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); - BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + Block fbitUncast = page.getBlock(channels.get(0)); + if (fbitUncast.areAllValuesNull()) { + return; + } + BooleanVector fbit = ((BooleanBlock) fbitUncast).asVector(); + Block tbitUncast = page.getBlock(channels.get(1)); + if (tbitUncast.areAllValuesNull()) { + return; + } + BooleanVector tbit = ((BooleanBlock) tbitUncast).asVector(); assert fbit.getPositionCount() == tbit.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java index ba2eaf66bf2af..4879df5cf1c2c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -155,7 +155,11 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void 
addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index 2bb273bf2598c..1e0ce58377f9e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index 6e1017d962254..99e6ace52b256 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -151,7 +151,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index d0c6cedeed2ff..85f823296c886 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) 
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { + return; + } + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index 86ada78737512..da93320eaf96e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + DoubleVector max = ((DoubleBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for 
(int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index d0809b2a6853c..c8b1b6910c0aa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + IntVector max = ((IntBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index 5b28fa01ef0ce..41d893f9bbf0c 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { + return; + } + LongVector max = ((LongBlock) maxUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert max.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index 39d65eabbe4b7..e08488685d2cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -150,7 +150,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int 
positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index ec7b21fc440a1..02866ee15b961 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -148,7 +148,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 4028bac4628a1..36c40e10e54d5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -150,7 +150,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index 4d0e78a26865d..7d0374b3d21f7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock 
groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + return; + } + DoubleVector min = ((DoubleBlock) minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index 97b5eafa9e72e..6625fd327237b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -148,8 +148,16 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + return; + } + IntVector min = ((IntBlock) 
minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index 7e68a4b933841..f0c3727d7db0b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { + return; + } + LongVector min = ((LongBlock) minUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert min.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index e8c9fe4728308..9d486b9614dab 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index fb4e06784823d..8c2bd7091143f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -151,7 +151,11 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) 
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index 45ce7d0d1c267..c1c332ba0094d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -153,7 +153,11 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { + return; + } + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); BytesRef scratch = new BytesRef(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java index f60a3c8cf152a..5085cfc3bebcf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -151,9 +151,21 @@ private void addRawInput(int positionOffset, IntBlock groups, DoubleVector value public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + Block valueUncast = page.getBlock(channels.get(0)); + if (valueUncast.areAllValuesNull()) { + return; + } + DoubleVector value = ((DoubleBlock) valueUncast).asVector(); + Block deltaUncast = page.getBlock(channels.get(1)); + if (deltaUncast.areAllValuesNull()) { + return; + } + DoubleVector delta = ((DoubleBlock) deltaUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java index 373a7af4b3d67..6891fe548908f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int positionOffset, IntBlock groups, IntVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { + return; + } + LongVector sum = ((LongBlock) sumUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java index 46e993b49c666..507aa343aa74e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -150,8 +150,16 @@ private void addRawInput(int 
positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { + return; + } + LongVector sum = ((LongBlock) sumUncast).asVector(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert sum.getPositionCount() == seen.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java new file mode 100644 index 0000000000000..3e9bc91e0039a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesBooleanAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBooleanAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BOOLEAN) ); + + private final DriverContext driverContext; + + private final ValuesBooleanAggregator.SingleState state; + + private final List channels; + + public ValuesBooleanAggregatorFunction(DriverContext driverContext, List channels, + ValuesBooleanAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesBooleanAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesBooleanAggregatorFunction(driverContext, channels, ValuesBooleanAggregator.initSingle()); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BooleanVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + 
ValuesBooleanAggregator.combine(state, vector.getBoolean(i)); + } + } + + private void addRawBlock(BooleanBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesBooleanAggregator.combine(state, block.getBoolean(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock values = (BooleanBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesBooleanAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesBooleanAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..20d5a5fda7726 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBooleanAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBooleanAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesBooleanAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesBooleanAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesBooleanAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesBooleanGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesBooleanGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of booleans"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..16e92a7c69ca8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesBooleanAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBooleanGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BOOLEAN) ); + + private final ValuesBooleanAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesBooleanGroupingAggregatorFunction(List channels, + ValuesBooleanAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesBooleanGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesBooleanGroupingAggregatorFunction(channels, ValuesBooleanAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return 
INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BooleanBlock valuesBlock = page.getBlock(channels.get(0)); + BooleanVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); 
+ } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BooleanVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesBooleanAggregator.combine(state, groupId, values.getBoolean(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BooleanBlock values = (BooleanBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + 
ValuesBooleanAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesBooleanAggregator.GroupingState inState = ((ValuesBooleanGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesBooleanAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesBooleanAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java new file mode 100644 index 0000000000000..24b88f59e38f4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBytesRefAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BYTES_REF) ); + + private final DriverContext driverContext; + + private final ValuesBytesRefAggregator.SingleState state; + + private final List channels; + + public ValuesBytesRefAggregatorFunction(DriverContext driverContext, List channels, + ValuesBytesRefAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesBytesRefAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesBytesRefAggregatorFunction(driverContext, channels, ValuesBytesRefAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + 
addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(BytesRefVector vector) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + + private void addRawBlock(BytesRefBlock block) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + assert values.getPositionCount() == 1; + BytesRef scratch = new BytesRef(); + ValuesBytesRefAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesBytesRefAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..31de817edf868 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesBytesRefAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesBytesRefAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesBytesRefAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesBytesRefGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of bytes"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..f9a51fcc52221 --- 
/dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefGroupingAggregatorFunction.java @@ -0,0 +1,201 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesBytesRefAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesBytesRefGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.BYTES_REF) ); + + private final ValuesBytesRefAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesBytesRefGroupingAggregatorFunction(List channels, + ValuesBytesRefAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesBytesRefGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesBytesRefGroupingAggregatorFunction(channels, ValuesBytesRefAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); + BytesRefVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + 
addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, scratch)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefBlock values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(v, 
scratch)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector values) { + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesBytesRefAggregator.combine(state, groupId, values.getBytesRef(groupPosition + positionOffset, scratch)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + BytesRefBlock values = (BytesRefBlock) valuesUncast; + BytesRef scratch = new BytesRef(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesBytesRefAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesBytesRefAggregator.GroupingState inState = ((ValuesBytesRefGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesBytesRefAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + 
} + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesBytesRefAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java new file mode 100644 index 0000000000000..a6295038dbd7a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesDoubleAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.DOUBLE) ); + + private final DriverContext driverContext; + + private final ValuesDoubleAggregator.SingleState state; + + private final List channels; + + public ValuesDoubleAggregatorFunction(DriverContext driverContext, List channels, + ValuesDoubleAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesDoubleAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesDoubleAggregatorFunction(driverContext, channels, ValuesDoubleAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(DoubleVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + + private void addRawBlock(DoubleBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = 
page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesDoubleAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesDoubleAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..049deda37c460 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class ValuesDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesDoubleAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesDoubleAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesDoubleAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesDoubleGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..11a0eb96c6a8e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesDoubleAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.DOUBLE) ); + + private final ValuesDoubleAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesDoubleGroupingAggregatorFunction(List channels, + ValuesDoubleAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesDoubleGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesDoubleGroupingAggregatorFunction(channels, ValuesDoubleAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); + DoubleVector valuesVector = 
valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int 
groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesDoubleAggregator.combine(state, groupId, values.getDouble(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + DoubleBlock values = (DoubleBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesDoubleAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new 
IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesDoubleAggregator.GroupingState inState = ((ValuesDoubleGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesDoubleAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesDoubleAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java new file mode 100644 index 0000000000000..19e578936cd14 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesIntAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.INT) ); + + private final DriverContext driverContext; + + private final ValuesIntAggregator.SingleState state; + + private final List channels; + + public ValuesIntAggregatorFunction(DriverContext driverContext, List channels, + ValuesIntAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesIntAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesIntAggregatorFunction(driverContext, channels, ValuesIntAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(IntVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesIntAggregator.combine(state, vector.getInt(i)); + } + } 
+ + private void addRawBlock(IntBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesIntAggregator.combine(state, block.getInt(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesIntAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesIntAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..21402b5913813 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesIntAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesIntAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesIntAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesIntGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..67722cd1318c0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunction.java @@ -0,0 +1,193 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.INT) ); + + private final ValuesIntAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesIntGroupingAggregatorFunction(List channels, + ValuesIntAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesIntGroupingAggregatorFunction(channels, ValuesIntAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + 
return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) 
{ + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesIntAggregator.combine(state, groupId, values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesIntAggregator.combine(state, groupId, values.getInt(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesIntAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + ValuesIntAggregator.GroupingState inState = ((ValuesIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new 
SeenGroupIds.Empty()); + ValuesIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java new file mode 100644 index 0000000000000..420da87076a37 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java @@ -0,0 +1,120 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunction} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesLongAggregatorFunction implements AggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.LONG) ); + + private final DriverContext driverContext; + + private final ValuesLongAggregator.SingleState state; + + private final List channels; + + public ValuesLongAggregatorFunction(DriverContext driverContext, List channels, + ValuesLongAggregator.SingleState state) { + this.driverContext = driverContext; + this.channels = channels; + this.state = state; + } + + public static ValuesLongAggregatorFunction create(DriverContext driverContext, + List channels) { + return new ValuesLongAggregatorFunction(driverContext, channels, ValuesLongAggregator.initSingle(driverContext.bigArrays())); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public void addRawInput(Page page) { + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + } + + private void addRawVector(LongVector vector) { + for (int i = 0; i < vector.getPositionCount(); i++) { + ValuesLongAggregator.combine(state, 
vector.getLong(i)); + } + } + + private void addRawBlock(LongBlock block) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesLongAggregator.combine(state, block.getLong(i)); + } + } + } + + @Override + public void addIntermediateInput(Page page) { + assert channels.size() == intermediateBlockCount(); + assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + assert values.getPositionCount() == 1; + ValuesLongAggregator.combineIntermediate(state, values); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + state.toIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = ValuesLongAggregator.evaluateFinal(state, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..a025bd0ade17a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionSupplier.java @@ -0,0 +1,38 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + public ValuesLongAggregatorFunctionSupplier(List channels) { + this.channels = channels; + } + + @Override + public ValuesLongAggregatorFunction aggregator(DriverContext driverContext) { + return ValuesLongAggregatorFunction.create(driverContext, channels); + } + + @Override + public ValuesLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return ValuesLongGroupingAggregatorFunction.create(channels, driverContext); + } + + @Override + public String describe() { + return "values of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..06508ce360ba4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunction.java @@ -0,0 +1,195 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link ValuesLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class ValuesLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("values", ElementType.LONG) ); + + private final ValuesLongAggregator.GroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + public ValuesLongGroupingAggregatorFunction(List channels, + ValuesLongAggregator.GroupingState state, DriverContext driverContext) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + } + + public static ValuesLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext) { + return new ValuesLongGroupingAggregatorFunction(channels, ValuesLongAggregator.initGrouping(driverContext.bigArrays()), driverContext); + } + + public static List intermediateStateDesc() { + return INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + if 
(valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = 
groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + ValuesLongAggregator.combine(state, groupId, values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + ValuesLongAggregator.combine(state, groupId, values.getLong(groupPosition + positionOffset)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block valuesUncast = page.getBlock(channels.get(0)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + ValuesLongAggregator.combineIntermediate(state, groupId, values, groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; 
got " + input.getClass()); + } + ValuesLongAggregator.GroupingState inState = ((ValuesLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + ValuesLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + DriverContext driverContext) { + blocks[offset] = ValuesLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java index de35965f52575..795207b245023 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesGroupingAggregatorFunction.java @@ -156,11 +156,31 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = 
page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java index 86b2f15187af6..12c0f24ef43e3 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesGroupingAggregatorFunction.java @@ -163,11 +163,31 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == 
count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java index 0ccff1a1463ac..2447939d56db9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesGroupingAggregatorFunction.java @@ -156,11 +156,31 @@ private void addRawInput(int positionOffset, IntBlock groups, LongVector values) public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block 
yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java index 30ef738669914..075f8749503b8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesGroupingAggregatorFunction.java @@ -163,11 +163,31 @@ private void addRawInput(int positionOffset, IntBlock groups, BytesRefVector val public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { state.enableGroupIdTracking(new SeenGroupIds.Empty()); assert channels.size() == intermediateBlockCount(); - DoubleVector xVal = page.getBlock(channels.get(0)).asVector(); - DoubleVector xDel = page.getBlock(channels.get(1)).asVector(); - DoubleVector yVal = page.getBlock(channels.get(2)).asVector(); - DoubleVector yDel = page.getBlock(channels.get(3)).asVector(); - LongVector count = 
page.getBlock(channels.get(4)).asVector(); + Block xValUncast = page.getBlock(channels.get(0)); + if (xValUncast.areAllValuesNull()) { + return; + } + DoubleVector xVal = ((DoubleBlock) xValUncast).asVector(); + Block xDelUncast = page.getBlock(channels.get(1)); + if (xDelUncast.areAllValuesNull()) { + return; + } + DoubleVector xDel = ((DoubleBlock) xDelUncast).asVector(); + Block yValUncast = page.getBlock(channels.get(2)); + if (yValUncast.areAllValuesNull()) { + return; + } + DoubleVector yVal = ((DoubleBlock) yValUncast).asVector(); + Block yDelUncast = page.getBlock(channels.get(3)); + if (yDelUncast.areAllValuesNull()) { + return; + } + DoubleVector yDel = ((DoubleBlock) yDelUncast).asVector(); + Block countUncast = page.getBlock(channels.get(4)); + if (countUncast.areAllValuesNull()) { + return; + } + LongVector count = ((LongBlock) countUncast).asVector(); assert xVal.getPositionCount() == xDel.getPositionCount() && xVal.getPositionCount() == yVal.getPositionCount() && xVal.getPositionCount() == yDel.getPositionCount() && xVal.getPositionCount() == count.getPositionCount(); for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { int groupId = Math.toIntExact(groups.getInt(groupPosition)); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java new file mode 100644 index 0000000000000..252436ad9634f --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ValuesBooleanAggregator.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * Aggregates field values for booleans. + */ +@Aggregator({ @IntermediateState(name = "values", type = "BOOLEAN_BLOCK") }) +@GroupingAggregator +class ValuesBooleanAggregator { + public static SingleState initSingle() { + return new SingleState(); + } + + public static void combine(SingleState state, boolean v) { + if (v) { + state.seenTrue = true; + } else { + state.seenFalse = true; + } + } + + public static void combineIntermediate(SingleState state, BooleanBlock values) { + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { + combine(state, values.getBoolean(i)); + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, boolean v) { + long index = ((long) groupId) << 1 | (v ? 
1 : 0); + state.values.set(index); + } + + public static void combineIntermediate(GroupingState state, int groupId, BooleanBlock values, int valuesPosition) { + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { + combine(state, groupId, values.getBoolean(i)); + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { + long stateOffset = ((long) statePosition) << 1; + boolean seenFalse = state.values.get(stateOffset); + boolean seenTrue = state.values.get(stateOffset | 1); + + if (seenFalse) { + combine(current, currentGroupId, false); + } + if (seenTrue) { + combine(current, currentGroupId, true); + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { + private boolean seenFalse; + private boolean seenTrue; + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + Block toBlock(BlockFactory blockFactory) { + if (seenFalse == false && seenTrue == false) { + return blockFactory.newConstantNullBlock(1); + } + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(2)) { + builder.beginPositionEntry(); + if (seenFalse) { + builder.appendBoolean(false); + } + if (seenTrue) { + builder.appendBoolean(true); + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() {} + } + + public static class GroupingState implements Releasable { + private final BitArray values; + + private GroupingState(BigArrays bigArrays) { + values = new BitArray(1, bigArrays); + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + 
blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + long index = ((long) selectedGroup) << 1; + boolean seenFalse = values.get(index); + boolean seenTrue = values.get(index | 1); + if (seenFalse) { + if (seenTrue) { + builder.beginPositionEntry(); + builder.appendBoolean(false); + builder.appendBoolean(true); + builder.endPositionEntry(); + } else { + builder.appendBoolean(false); + } + } else { + if (seenTrue) { + builder.appendBoolean(true); + } else { + builder.appendNull(); + } + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we don't need to track which values have been seen because we don't do anything special for groups without values + } + + @Override + public void close() { + Releasables.closeExpectNoException(values); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st new file mode 100644 index 0000000000000..f9b15ccd34092 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ValuesAggregator.java.st @@ -0,0 +1,306 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +$if(BytesRef)$ +import org.apache.lucene.util.BytesRef; +$endif$ +import org.elasticsearch.common.util.BigArrays; +$if(BytesRef)$ +import org.elasticsearch.common.util.BytesRefHash; +$else$ +import org.elasticsearch.common.util.LongHash; +$endif$ +$if(long||double||BytesRef)$ +import org.elasticsearch.common.util.LongLongHash; +$endif$ +$if(BytesRef)$ +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +$endif$ +import org.elasticsearch.compute.ann.Aggregator; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +$if(int||double||BytesRef)$ +import org.elasticsearch.compute.data.$Type$Block; +$endif$ +import org.elasticsearch.compute.data.IntVector; +$if(long)$ +import org.elasticsearch.compute.data.LongBlock; +$endif$ +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +$if(BytesRef)$ +import org.elasticsearch.core.Releasables; + +$else$ + +$endif$ +/** + * Aggregates field values for $type$. + * This class is generated. Edit @{code X-ValuesAggregator.java.st} instead + * of this file. 
+ */ +@Aggregator({ @IntermediateState(name = "values", type = "$TYPE$_BLOCK") }) +@GroupingAggregator +class Values$Type$Aggregator { + public static SingleState initSingle(BigArrays bigArrays) { + return new SingleState(bigArrays); + } + + public static void combine(SingleState state, $type$ v) { +$if(double)$ + state.values.add(Double.doubleToLongBits(v)); +$else$ + state.values.add(v); +$endif$ + } + + public static void combineIntermediate(SingleState state, $Type$Block values) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int start = values.getFirstValueIndex(0); + int end = start + values.getValueCount(0); + for (int i = start; i < end; i++) { +$if(BytesRef)$ + combine(state, values.getBytesRef(i, scratch)); +$else$ + combine(state, values.get$Type$(i)); +$endif$ + } + } + + public static Block evaluateFinal(SingleState state, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory()); + } + + public static GroupingState initGrouping(BigArrays bigArrays) { + return new GroupingState(bigArrays); + } + + public static void combine(GroupingState state, int groupId, $type$ v) { +$if(long)$ + state.values.add(groupId, v); +$elseif(double)$ + state.values.add(groupId, Double.doubleToLongBits(v)); +$elseif(BytesRef)$ + state.values.add(groupId, BlockHash.hashOrdToGroup(state.bytes.add(v))); +$elseif(int)$ + /* + * Encode the groupId and value into a single long - + * the top 32 bits for the group, the bottom 32 for the value. 
+ */ + state.values.add((((long) groupId) << Integer.SIZE) | (v & 0xFFFFFFFFL)); +$endif$ + } + + public static void combineIntermediate(GroupingState state, int groupId, $Type$Block values, int valuesPosition) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + int start = values.getFirstValueIndex(valuesPosition); + int end = start + values.getValueCount(valuesPosition); + for (int i = start; i < end; i++) { +$if(BytesRef)$ + combine(state, groupId, values.getBytesRef(i, scratch)); +$else$ + combine(state, groupId, values.get$Type$(i)); +$endif$ + } + } + + public static void combineStates(GroupingState current, int currentGroupId, GroupingState state, int statePosition) { +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + for (int id = 0; id < state.values.size(); id++) { +$if(long||BytesRef)$ + if (state.values.getKey1(id) == statePosition) { + long value = state.values.getKey2(id); +$elseif(double)$ + if (state.values.getKey1(id) == statePosition) { + double value = Double.longBitsToDouble(state.values.getKey2(id)); +$elseif(int)$ + long both = state.values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == statePosition) { + int value = (int) both; +$endif$ + combine(current, currentGroupId, $if(BytesRef)$state.bytes.get(value, scratch)$else$value$endif$); + } + } + } + + public static Block evaluateFinal(GroupingState state, IntVector selected, DriverContext driverContext) { + return state.toBlock(driverContext.blockFactory(), selected); + } + + public static class SingleState implements Releasable { +$if(BytesRef)$ + private final BytesRefHash values; + +$else$ + private final LongHash values; + +$endif$ + private SingleState(BigArrays bigArrays) { +$if(BytesRef)$ + values = new BytesRefHash(1, bigArrays); +$else$ + values = new LongHash(1, bigArrays); +$endif$ + } + + void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory()); + } + + 
Block toBlock(BlockFactory blockFactory) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(1); + } +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + if (values.size() == 1) { +$if(long)$ + return blockFactory.newConstantLongBlockWith(values.get(0), 1); +$elseif(double)$ + return blockFactory.newConstantDoubleBlockWith(Double.longBitsToDouble(values.get(0)), 1); +$elseif(int)$ + return blockFactory.newConstantIntBlockWith((int) values.get(0), 1); +$elseif(BytesRef)$ + return blockFactory.newConstantBytesRefBlockWith(values.get(0, scratch), 1); +$endif$ + } + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder((int) values.size())) { + builder.beginPositionEntry(); + for (int id = 0; id < values.size(); id++) { +$if(long)$ + builder.appendLong(values.get(id)); +$elseif(double)$ + builder.appendDouble(Double.longBitsToDouble(values.get(id))); +$elseif(int)$ + builder.appendInt((int) values.get(id)); +$elseif(BytesRef)$ + builder.appendBytesRef(values.get(id, scratch)); +$endif$ + } + builder.endPositionEntry(); + return builder.build(); + } + } + + @Override + public void close() { + values.close(); + } + } + + /** + * State for a grouped {@code VALUES} aggregation. This implementation + * emphasizes collect-time performance over the performance of rendering + * results. That's good, but it's a pretty intensive emphasis, requiring + * an {@code O(n^2)} operation for rendering results to support a {@code O(1)} + * collect operation. But at least it's fairly simple.
+ */ + public static class GroupingState implements Releasable { +$if(long||double)$ + private final LongLongHash values; + +$elseif(BytesRef)$ + private final LongLongHash values; + private final BytesRefHash bytes; + +$elseif(int)$ + private final LongHash values; + +$endif$ + private GroupingState(BigArrays bigArrays) { +$if(long||double)$ + values = new LongLongHash(1, bigArrays); +$elseif(BytesRef)$ + values = new LongLongHash(1, bigArrays); + bytes = new BytesRefHash(1, bigArrays); +$elseif(int)$ + values = new LongHash(1, bigArrays); +$endif$ + } + + void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + blocks[offset] = toBlock(driverContext.blockFactory(), selected); + } + + Block toBlock(BlockFactory blockFactory, IntVector selected) { + if (values.size() == 0) { + return blockFactory.newConstantNullBlock(selected.getPositionCount()); + } +$if(BytesRef)$ + BytesRef scratch = new BytesRef(); +$endif$ + try ($Type$Block.Builder builder = blockFactory.new$Type$BlockBuilder(selected.getPositionCount())) { + for (int s = 0; s < selected.getPositionCount(); s++) { + int selectedGroup = selected.getInt(s); + /* + * Count can effectively be in three states - 0, 1, many. We use those + * states to buffer the first value, so we can avoid calling + * beginPositionEntry on single valued fields. 
+ */ + int count = 0; + $if(BytesRef)$long$else$$type$$endif$ first = 0; + for (int id = 0; id < values.size(); id++) { +$if(long||BytesRef)$ + if (values.getKey1(id) == selectedGroup) { + long value = values.getKey2(id); +$elseif(double)$ + if (values.getKey1(id) == selectedGroup) { + double value = Double.longBitsToDouble(values.getKey2(id)); +$elseif(int)$ + long both = values.get(id); + int group = (int) (both >>> Integer.SIZE); + if (group == selectedGroup) { + int value = (int) both; +$endif$ + switch (count) { + case 0 -> first = value; + case 1 -> { + builder.beginPositionEntry(); + builder.append$Type$($if(BytesRef)$bytes.get(first, scratch)$else$first$endif$); + builder.append$Type$($if(BytesRef)$bytes.get(value, scratch)$else$value$endif$); + } + default -> builder.append$Type$($if(BytesRef)$bytes.get(value, scratch)$else$value$endif$); + } + count++; + } + } + switch (count) { + case 0 -> builder.appendNull(); + case 1 -> builder.append$Type$($if(BytesRef)$bytes.get(first, scratch)$else$first$endif$); + default -> builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seen) { + // we figure out seen values from nulls on the values block + } + + @Override + public void close() { +$if(BytesRef)$ + Releasables.closeExpectNoException(values, bytes); +$else$ + values.close(); +$endif$ + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java new file mode 100644 index 0000000000000..b5525b985be90 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesDoubleAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceDoubleBlockSourceOperator(blockFactory, IntStream.range(0, size).mapToDouble(i -> randomDouble())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of doubles"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToDouble(b -> allDoubles(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..4554a60b7a00c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesDoubleGroupingAggregatorFunctionTests.java @@ -0,0 +1,59 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesDoubleGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of doubles"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new LongDoubleTupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomDouble())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream() + .flatMapToDouble(p -> allDoubles(p, group)) + .boxed() + .collect(Collectors.toSet()) + .toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, 
equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java new file mode 100644 index 0000000000000..9d421c7801a43 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesIntAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceIntBlockSourceOperator(blockFactory, IntStream.range(0, size).map(i -> randomInt())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of ints"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToInt(b -> 
allInts(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..831e2c1fdfd68 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesIntGroupingAggregatorFunctionTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.LongIntBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesIntGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of ints"; + } + + @Override + protected SourceOperator 
simpleInput(BlockFactory blockFactory, int size) { + return new LongIntBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomInt())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream().flatMapToInt(p -> allInts(p, group)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java new file mode 100644 index 0000000000000..e2a77bed4f4cd --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class ValuesLongAggregatorFunctionTests extends AggregatorFunctionTestCase { + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new SequenceLongBlockSourceOperator(blockFactory, LongStream.range(0, size).map(l -> randomLong())); + } + + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of longs"; + } + + @Override + public void assertSimpleOutput(List input, Block result) { + Object[] values = input.stream().flatMapToLong(b -> allLongs(b)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + assertThat((List) BlockUtils.toJavaObject(result, 0), containsInAnyOrder(values)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java new file mode 100644 index 0000000000000..ab667b959c7ae --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongGroupingAggregatorFunctionTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.TupleBlockSourceOperator; +import org.elasticsearch.core.Tuple; + +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class ValuesLongGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase { + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "values of longs"; + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + return new TupleBlockSourceOperator( + blockFactory, + LongStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), randomLong())) + ); + } + + @Override + public void assertSimpleGroup(List input, Block result, int position, Long group) { + Object[] values = input.stream().flatMapToLong(p -> allLongs(p, group)).boxed().collect(Collectors.toSet()).toArray(Object[]::new); + Object resultValue = BlockUtils.toJavaObject(result, position); + switch (values.length) { + case 0 -> assertThat(resultValue, nullValue()); + case 1 -> assertThat(resultValue, equalTo(values[0])); + default -> assertThat((List) resultValue, containsInAnyOrder(values)); + } + } +} diff --git 
a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index c4ac6923b4d02..0446a11240f6f 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -16,6 +16,7 @@ import org.junit.Before; import org.junit.ClassRule; +import java.io.IOException; import java.util.HashSet; import java.util.Set; @@ -64,7 +65,7 @@ public MixedClusterEsqlSpecIT(String fileName, String groupName, String testName } @Override - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, bwcVersion)); if (mode == ASYNC) { diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index e5e53f34df312..0ea445255f0d8 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -19,10 +19,13 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import 
org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase; import org.elasticsearch.xpack.ql.CsvSpecReader; import org.elasticsearch.xpack.ql.CsvSpecReader.CsvTestCase; import org.elasticsearch.xpack.ql.SpecReader; +import org.junit.AfterClass; import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -34,6 +37,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -62,6 +66,9 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { @ClassRule public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + private static TestFeatureService remoteFeaturesService; + private static RestClient remoteFeaturesServiceClient; + @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { List urls = classpathResources("/*.csv-spec"); @@ -86,12 +93,34 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In } @Override - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); + for (String feature : testCase.requiredFeatures) { + assumeTrue("Test " + testName + " requires " + feature, remoteFeaturesService().clusterHasFeature(feature)); + } assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } + private TestFeatureService remoteFeaturesService() throws IOException { + if (remoteFeaturesService == null) { + HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + remoteFeaturesServiceClient = super.buildClient(restAdminSettings(), remoteHosts); + var remoteNodeVersions = 
readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var semanticNodeVersions = remoteNodeVersions.stream() + .map(ESRestTestCase::parseLegacyVersion) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteFeaturesServiceClient), semanticNodeVersions); + } + return remoteFeaturesService; + } + + @AfterClass + public static void closeRemoteFeaturesService() throws IOException { + IOUtils.close(remoteFeaturesServiceClient); + } + @Override protected String getTestRestCluster() { return localCluster.getHttpAddresses(); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 090941a9bc0be..349954450904d 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -134,7 +134,7 @@ public final void test() throws Throwable { } } - protected void shouldSkipTest(String testName) { + protected void shouldSkipTest(String testName) throws IOException { for (String feature : testCase.requiredFeatures) { assumeTrue("Test " + testName + " requires " + feature, clusterHasFeature(feature)); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 7641bd3305b1d..1406028b2c81f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -280,3 +280,60 @@ emp_no:integer | is_rehired:boolean | a1:boolean 10004 | true | true 10005 | [false,false,false,true] | false ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +|
STATS still_hired=MV_SORT(VALUES(still_hired)) +; + + still_hired:boolean +[false, true] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS still_hired=MV_SORT(VALUES(still_hired)) BY first_letter +| SORT first_letter +; + +still_hired:boolean | first_letter:keyword + false | A + true | B + true | C + true | G + true | K + false | P + [false, true] | S + true | T +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS still_hired=MV_SORT(VALUES(still_hired)) BY job_positions +| SORT job_positions +; + +still_hired:boolean | job_positions:keyword + true | Accountant + true | Head Human Resources + [false, true] | Internship + true | Junior Developer + false | Principal Support Engineer + true | Purchase Manager + true | Reporting Analyst + [false, true] | Senior Python Developer + [false, true] | Senior Team Lead + true | Support Engineer + [false, true] | Tech Lead + [false, true] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 44e437b9683ce..7e52864d0e379 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -481,3 +481,60 @@ emp_no:integer | salary_change:double | sa:double | sd:double 10008 | [-2.92,0.75,3.54,12.68] | [-2.92,0.75,3.54,12.68] | [12.68,3.54,0.75,-2.92] 10009 | null | null | null ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS h=MV_SORT(VALUES(height)) +; + + h:double +[1.56, 1.7, 1.78, 1.83, 1.85, 2.03, 2.05, 2.08, 2.1] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS h=MV_SORT(VALUES(height)) 
BY first_letter +| SORT first_letter +; + + h:double | first_letter:keyword + 1.56 | A + 2.08 | B + 1.78 | C + 2.03 | G + 2.05 | K + 1.83 | P +[1.85, 2.1] | S + 1.70 | T +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS h=MV_SORT(VALUES(height)) BY job_positions +| SORT job_positions +; + + h:double | job_positions:keyword + 2.03 | Accountant + 1.78 | Head Human Resources + [1.85, 2.1] | Internship + 2.1 | Junior Developer + 1.56 | Principal Support Engineer + 2.1 | Purchase Manager + 1.78 | Reporting Analyst +[1.85, 2.03, 2.1] | Senior Python Developer + [1.56, 2.08] | Senior Team Lead + 1.78 | Support Engineer + [1.56, 1.78] | Tech Lead +[1.7, 1.83, 2.05] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index deb398661aa80..20f2e579643f2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -789,3 +789,174 @@ warning:Line 1:76: org.elasticsearch.xpack.ql.InvalidArgumentException: [1002000 x:long ; + +valuesLong +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.long)) +; + + l:long +[1, 2, 3, 4, 5] +; + +valuesLongGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages.long)) BY first_letter +| SORT first_letter +; + + l:long | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesLongGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.long)) BY job_positions +| SORT job_positions +; + + l:long | job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | 
Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; + +valuesInt +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages)) +; + + l:integer +[1, 2, 3, 4, 5] +; + +valuesIntGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages)) BY first_letter +| SORT first_letter +; + +l:integer | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesIntGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages)) BY job_positions +| SORT job_positions +; + + l:integer | job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; + +valuesShort +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.short)) +; + + l:integer +[1, 2, 3, 4, 5] +; + +valuesShortGrouped +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS l=MV_SORT(VALUES(languages.short)) BY first_letter +| SORT first_letter +; + +l:integer | first_letter:keyword + 3 | A + 5 | B + 5 | C + 2 | G + 1 | K + 4 | P + [1, 2] | S + 4 | T +; + +valuesShortGroupedByOrdinals +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS l=MV_SORT(VALUES(languages.short)) BY job_positions +| SORT job_positions +; + + l:integer | 
job_positions:keyword + 2 | Accountant + 5 | Head Human Resources + [1, 2] | Internship + 2 | Junior Developer + 3 | Principal Support Engineer + 2 | Purchase Manager + 5 | Reporting Analyst + [1, 2] | Senior Python Developer + [3, 5] | Senior Team Lead + 5 | Support Engineer + [3, 5] | Tech Lead + [1, 4] | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index c77c0e6747e87..b83dda1376ac5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -361,3 +361,45 @@ epsilon | epsilon gw instance | [fe80::cae2:65ff:fece:feb9, fe80: epsilon | [epsilon host, epsilon2 host] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [epsilon host@@fe81::cae2:65ff:fece:feb9, epsilon2 host@@fe82::cae2:65ff:fece:fec0] epsilon | null | null | null ; + +values +required_feature: esql.agg_values + + FROM hosts +| STATS ip0=MV_SORT(VALUES(ip0)) +; + + ip0:ip +[::1, 127.0.0.1, fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM hosts +| EVAL host=SUBSTRING(host, 0, 1) +| STATS ip0=MV_SORT(VALUES(ip0)) BY host +| SORT host +; + + ip0:ip | host:keyword + [::1, 127.0.0.1] | a + 127.0.0.1 | b +[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | e +fe80::cae2:65ff:fece:feb9 | g +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM hosts +| STATS ip0=MV_SORT(VALUES(ip0)) BY host +| SORT host +; + + ip0:ip | host:keyword + [::1, 127.0.0.1] | alpha + 127.0.0.1 | beta +[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1, fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | epsilon +fe80::cae2:65ff:fece:feb9 | 
gamma +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 332533bd63b02..c254352431e86 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -99,6 +99,7 @@ to_upper |"keyword|text to_upper(str:keyword|text)" to_ver |"version to_ver(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false to_version |"version to_version(field:keyword|text|version)" |field |"keyword|text|version" | |version | "Converts an input string to a version value." |false |false | false trim |"keyword|text trim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" | "Removes leading and trailing whitespaces from a string." | false | false | false +values |"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" |field |"boolean|date|double|integer|ip|keyword|long|text|version" | |"boolean|date|double|integer|ip|keyword|long|text|version" |"Collect values for a field." 
|false |false |true ; @@ -202,6 +203,7 @@ double tau() "version to_ver(field:keyword|text|version)" "version to_version(field:keyword|text|version)" "keyword|text trim(string:keyword|text)" +"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" ; @@ -221,5 +223,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -96 | 96 | 96 +97 | 97 | 97 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5a44f16dd60af..1bd7860af1018 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1098,3 +1098,55 @@ row a = "π/2 + a + B + Λ ºC" | eval lower = to_lower(a), upper = to_upper(a) a:keyword | upper:keyword | lower:keyword π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc ; + +values +required_feature: esql.agg_values + + FROM employees +| WHERE emp_no <= 10009 +| STATS first_name=MV_SORT(VALUES(first_name)) +; + + first_name:keyword +[Anneke, Bezalel, Chirstian, Georgi, Kyoichi, Parto, Saniya, Sumant, Tzvetan] +; + +valuesGrouped +required_feature: esql.agg_values + +// tag::values-grouped[] + FROM employees +| EVAL first_letter = SUBSTRING(first_name, 0, 1) +| STATS first_name=MV_SORT(VALUES(first_name)) BY first_letter +| SORT first_letter +// end::values-grouped[] +; + +// tag::values-grouped-result[] + first_name:keyword | first_letter:keyword + [Alejandro, Amabile, Anneke, Anoosh, Arumugam] | A + [Basil, Berhard, Berni, Bezalel, Bojan, Breannda, Brendon] | B + [Charlene, Chirstian, Claudi, Cristinel] | C + [Danel, Divier, Domenick, Duangkaew] | D + [Ebbe, Eberhardt, Erez] | E + Florian | F + [Gao, Georgi, Georgy, Gino, Guoxiang] | G + [Heping, Hidefumi, Hilari, Hironobu, Hironoby, Hisao] 
| H + [Jayson, Jungsoon] | J + [Kazuhide, Kazuhito, Kendra, Kenroku, Kshitij, Kwee, Kyoichi] | K + [Lillian, Lucien] | L + [Magy, Margareta, Mary, Mayuko, Mayumi, Mingsen, Mokhtar, Mona, Moss] | M + Otmar | O + [Parto, Parviz, Patricio, Prasadram, Premal] | P + [Ramzi, Remzi, Reuven] | R +[Sailaja, Saniya, Sanjiv, Satosi, Shahaf, Shir, Somnath, Sreekrishna, Sudharsan, Sumant, Suzette] | S + [Tse, Tuval, Tzvetan] | T + [Udi, Uri] | U + [Valdiodio, Valter, Vishv] | V + Weiyi | W + Xinglin | X + [Yinghua, Yishay, Yongqiao] | Y + [Zhongwei, Zvonko] | Z + null | null +// end::values-grouped-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index df1fa6e67f279..c5e42186d976f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -310,3 +310,63 @@ v:v | version:v |version_text:s | id:i | m:i | g:v | i:v | c null | null | null | 13 | 0 | 1.3.0 | 0.1 | none null | null | null | 11 | 0 | 1.3.0 | 0.1 | none ; + +values +required_feature: esql.agg_values + + FROM apps +| STATS version=MV_SORT(VALUES(version)) +; + + version:version +[1, 1.2.3.4, 1.11.0, 2.1, 2.3.4, 2.12.0, 5.2.9-SNAPSHOT, 5.2.9, bad] +; + +valuesGrouped +required_feature: esql.agg_values + + FROM apps +| EVAL name=SUBSTRING(name, 0, 1) +| STATS version=MV_SORT(VALUES(version)) BY name +| SORT name +; + +version:version | name:keyword + [1, 1.2.3.4] | a + 2.1 | b + 2.3.4 | c + 2.12.0 | d + 1.11.0 | e + 5.2.9 | f + 5.2.9-SNAPSHOT | g + 1.2.3.4 | h + bad | i + 5.2.9 | j + null | k + null | l + 5.2.9 | m +; + +valuesGroupedByOrdinals +required_feature: esql.agg_values + + FROM apps +| STATS version=MV_SORT(VALUES(version)) BY name +| SORT name +; + +version:version | name:keyword + [1, 1.2.3.4] | aaaaa + 2.1 | bbbbb + 2.3.4 | ccccc + 2.12.0 | ddddd + 1.11.0 | eeeee + 5.2.9 | fffff + 
5.2.9-SNAPSHOT | ggggg + 1.2.3.4 | hhhhh + bad | iiiii + 5.2.9 | jjjjj + null | kkkkk + null | lllll + 5.2.9 | mmmmm +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index 088e768684cf2..0379f1a5d3614 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; @@ -55,6 +56,12 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa GEO_SHAPE.typeName(), CARTESIAN_SHAPE.typeName() }; private static final String[] POINT_TYPE_NAMES = new String[] { GEO_POINT.typeName(), CARTESIAN_POINT.typeName() }; + private static final String[] NON_SPATIAL_TYPE_NAMES = EsqlDataTypes.types() + .stream() + .filter(EsqlDataTypes::isRepresentable) + .filter(t -> EsqlDataTypes.isSpatial(t) == false) + .map(DataType::esType) + .toArray(String[]::new); public static Expression.TypeResolution isSpatialPoint(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return isType(e, EsqlDataTypes::isSpatialPoint, operationName, paramOrd, POINT_TYPE_NAMES); @@ -63,4 +70,9 @@ public static Expression.TypeResolution isSpatialPoint(Expression e, String oper public static Expression.TypeResolution isSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return isType(e, EsqlDataTypes::isSpatial, operationName, paramOrd, SPATIAL_TYPE_NAMES); } + + public static 
Expression.TypeResolution isNotSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + return isType(e, t -> EsqlDataTypes.isSpatial(t) == false, operationName, paramOrd, NON_SPATIAL_TYPE_NAMES); + } + } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 6c3a1a7267da2..6f1f01bbe632c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; @@ -127,7 +128,8 @@ private FunctionDefinition[][] functions() { def(MedianAbsoluteDeviation.class, MedianAbsoluteDeviation::new, "median_absolute_deviation"), def(Min.class, Min::new, "min"), def(Percentile.class, Percentile::new, "percentile"), - def(Sum.class, Sum::new, "sum") }, + def(Sum.class, Sum::new, "sum"), + def(Values.class, Values::new, "values") }, // math new FunctionDefinition[] { def(Abs.class, Abs::new, "abs"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java new file mode 100644 index 
0000000000000..d0d614a665794 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.planner.ToAggregator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; + +public class Values extends AggregateFunction implements ToAggregator { + @FunctionInfo( + returnType = { "boolean|date|double|integer|ip|keyword|long|text|version" }, + description = "Collect values 
for a field.", + isAggregation = true + ) + public Values( + Source source, + @Param(name = "field", type = { "boolean|date|double|integer|ip|keyword|long|text|version" }) Expression v + ) { + super(source, v); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Values::new, field()); + } + + @Override + public Values replaceChildren(List newChildren) { + return new Values(source(), newChildren.get(0)); + } + + @Override + public DataType dataType() { + return field().dataType(); + } + + @Override + protected TypeResolution resolveType() { + return EsqlTypeResolutions.isNotSpatial(field(), sourceText(), DEFAULT); + } + + @Override + public AggregatorFunctionSupplier supplier(List inputChannels) { + DataType type = field().dataType(); + if (type == DataTypes.INTEGER) { + return new ValuesIntAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.LONG) { + return new ValuesLongAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.DOUBLE) { + return new ValuesDoubleAggregatorFunctionSupplier(inputChannels); + } + if (DataTypes.isString(type) || type == DataTypes.IP || type == DataTypes.VERSION) { + return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels); + } + if (type == DataTypes.BOOLEAN) { + return new ValuesBooleanAggregatorFunctionSupplier(inputChannels); + } + // TODO cartesian_point, geo_point + throw EsqlIllegalArgumentException.illegalDataType(type); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 8cf6e165653e3..c20763e08f343 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -37,6 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; @@ -414,6 +415,7 @@ public static List namedTypeEntries() { of(AggregateFunction.class, Percentile.class, PlanNamedTypes::writePercentile, PlanNamedTypes::readPercentile), of(AggregateFunction.class, SpatialCentroid.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), of(AggregateFunction.class, Sum.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), + of(AggregateFunction.class, Values.class, PlanNamedTypes::writeAggFunction, PlanNamedTypes::readAggFunction), // Multivalue functions of(ScalarFunction.class, MvAvg.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), of(ScalarFunction.class, MvCount.class, PlanNamedTypes::writeMvFunction, PlanNamedTypes::readMvFunction), @@ -1670,7 +1672,8 @@ static void writeArithmeticOperation(PlanStreamOutput out, ArithmeticOperation a entry(name(Max.class), Max::new), entry(name(Median.class), Median::new), entry(name(MedianAbsoluteDeviation.class), MedianAbsoluteDeviation::new), - entry(name(SpatialCentroid.class), SpatialCentroid::new) + entry(name(SpatialCentroid.class), SpatialCentroid::new), + entry(name(Values.class), Values::new) ); static AggregateFunction readAggFunction(PlanStreamInput in, String name) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index f5cee225b1b13..6ed191a6df500 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Values; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; @@ -64,7 +65,8 @@ public class AggregateMapper { Min.class, Percentile.class, SpatialCentroid.class, - Sum.class + Sum.class, + Values.class ); /** Record of agg Class, type, and grouping (or non-grouping). */ @@ -148,6 +150,9 @@ private static Stream, Tuple>> typeAndNames(Class } else if (SpatialAggregateFunction.class.isAssignableFrom(clazz)) { types = SPATIAL; extraConfigs = List.of("SourceValues", "DocValues"); + } else if (Values.class.isAssignableFrom(clazz)) { + // TODO can't we figure this out from the function itself? 
+ types = List.of("Int", "Long", "Double", "Boolean", "BytesRef"); } else { assert clazz == CountDistinct.class : "Expected CountDistinct, got: " + clazz; types = Stream.concat(NUMERIC.stream(), Stream.of("Boolean", "BytesRef")).toList(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index fb891b0c87a13..3b2c1e9d9a486 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -43,9 +43,12 @@ public class EsqlFeatures implements FeatureSpecification { // */ // private static final NodeFeature GEO_SHAPE_SUPPORT = new NodeFeature("esql.geo_shape"); - public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); + /** + * The introduction of the {@code VALUES} agg. + */ + private static final NodeFeature AGG_VALUES = new NodeFeature("esql.agg_values"); - private static final NodeFeature MV_LOAD = new NodeFeature("esql.mv_load"); + public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); @Override public Map getHistoricalFeatures() { @@ -61,6 +64,6 @@ public Map getHistoricalFeatures() { @Override public Set getFeatures() { - return Set.of(ASYNC_QUERY); + return Set.of(ASYNC_QUERY, AGG_VALUES); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index b451f2d4630bb..055561c747a63 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -15,6 +15,7 @@ dependencies { javaRestTestImplementation project(path: xpackModule('monitoring')) javaRestTestImplementation project(path: xpackModule('transform')) javaRestTestImplementation project(path: xpackModule('rank-rrf')) + javaRestTestImplementation 
project(path: xpackModule('ql')) javaRestTestImplementation project(path: xpackModule('esql')) } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index f9213a7fcaeb8..c4640bc17845a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -90,9 +90,11 @@ import org.elasticsearch.xpack.core.ml.notifications.NotificationsIndex; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.TokenMetadata; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.autoscaling.MlScalingReason; +import org.elasticsearch.xpack.ql.plugin.QlPlugin; import org.elasticsearch.xpack.slm.SnapshotLifecycle; import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; import org.elasticsearch.xpack.transform.Transform; @@ -154,7 +156,10 @@ protected Collection> nodePlugins() { SnapshotLifecycle.class, // The feature reset API touches transform custom cluster state so we need this plugin to understand it Transform.class, - DataStreamsPlugin.class + DataStreamsPlugin.class, + // ESQL and its dependency needed for node features + QlPlugin.class, + EsqlPlugin.class ); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml index 4727a5394cf3d..8a9004d372d17 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml @@ -131,6 +131,14 @@ geo_point unsortable with limit from row: body: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) | limit 5 | sort pt' +--- +values unsupported for geo_point: + - do: + catch: '/.+argument of \[VALUES\(location\)\] must be \[boolean, date, double, integer, ip, keyword, long, null, text, unsigned_long or version\].+/' + esql.query: + body: + query: 'FROM geo_points | STATS VALUES(location)' + --- cartesian_point: - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index cef7f88506de8..09462691688bf 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -4,6 +4,7 @@ setup: version: " - 8.10.99" reason: "ESQL is available in 8.11+" features: allowed_warnings_regex + - do: indices.create: index: test @@ -441,3 +442,38 @@ setup: - length: { values: 2 } - match: { values.0.0: "IT Director" } - match: { values.1.0: "Payroll Specialist" } + +--- +values: + - requires: + cluster_features: esql.value_agg + reason: "values is available in 8.14+" + + - do: + esql.query: + body: + query: 'FROM test | STATS job = VALUES(job) | LIMIT 1' + - match: { columns.0.name: "job" } + - match: { columns.0.type: "text" } + - length: { values: 1 } + - match: { values.0: [ [ "IT Director", "Payroll Specialist" ] ] } + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "emp_no": 30, "name": "Stuff", "job": "Other", "tag": "baz" } + + - do: + esql.query: + body: + query: 'FROM test | STATS job = VALUES(job) BY tag | SORT tag | LIMIT 10' + - match: { columns.0.name: "job" } 
+ - match: { columns.0.type: "text" } + - match: { columns.1.name: "tag" } + - match: { columns.1.type: "text" } + - length: { values: 2 } + - match: { values.0: [ [ "Payroll Specialist", "Other" ], "baz" ] } + - match: { values.1: [ "IT Director", "foo bar" ] } From a41e11f034e41c8c0045223adbdb87eec4cce0a1 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 21 Mar 2024 09:52:12 -0700 Subject: [PATCH 103/214] Improve stability of build complete archive creation (#106578) This is an attempt to fix some errors creating the diagnostic archive at the end of builds in CI. We've seen errors where we attempt to write more bytes than expected into the archive. The assumption here is that we are bundling files that are possibly still being written to. This _shouldn't_ happen, but it's useful to have these diagnostic bundles in either case. Closes https://github.com/elastic/elasticsearch/issues/106576 --- .../internal/ElasticsearchBuildCompletePlugin.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index e0588ed440c57..d342ebc435197 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -28,7 +28,12 @@ import org.gradle.api.tasks.Input; import org.jetbrains.annotations.NotNull; -import java.io.*; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; @@ -210,12 +215,15 @@ private static void createBuildArchiveTar(List files, 
File projectDir, Fil throw new IOException("Support only file!"); } + long entrySize = Files.size(path); TarArchiveEntry tarEntry = new TarArchiveEntry(path.toFile(), calculateArchivePath(path, projectPath)); - tarEntry.setSize(Files.size(path)); + tarEntry.setSize(entrySize); tOut.putArchiveEntry(tarEntry); // copy file to TarArchiveOutputStream - Files.copy(path, tOut); + try (BufferedInputStream bin = new BufferedInputStream(Files.newInputStream(path))) { + IOUtils.copyLarge(bin, tOut, 0, entrySize); + } tOut.closeArchiveEntry(); } From 9c8c31952e5e87db0925c144af0ef0a3754bc766 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 21 Mar 2024 17:53:29 +0100 Subject: [PATCH 104/214] Refactor ESQL HeapAttackIT to run on Serverless (#106586) --- .../xpack/esql/heap_attack/Clusters.java | 30 +++++++++++++++++++ .../xpack/esql/heap_attack/HeapAttackIT.java | 19 +----------- .../RestTriggerOutOfMemoryAction.java | 3 ++ 3 files changed, 34 insertions(+), 18 deletions(-) create mode 100644 test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java new file mode 100644 index 0000000000000..fbc191a12d8b0 --- /dev/null +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/Clusters.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.xpack.esql.heap_attack; + +import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; + +public class Clusters { + static ElasticsearchCluster buildCluster() { + var spec = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .nodes(2) + .module("test-esql-heap-attack") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial"); + String javaVersion = JvmInfo.jvmInfo().version(); + if (javaVersion.equals("20") || javaVersion.equals("21")) { + // see https://github.com/elastic/elasticsearch/issues/99592 + spec.jvmArg("-XX:+UnlockDiagnosticVMOptions -XX:+G1UsePreventiveGC"); + } + return spec.build(); + } +} diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 8c87ef5977114..2f3826f8423b8 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -21,10 +21,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.TestThreadPool; @@ -60,25 +58,10 @@ public class HeapAttackIT extends 
ESRestTestCase { @ClassRule - public static ElasticsearchCluster cluster = buildCluster(); + public static ElasticsearchCluster cluster = Clusters.buildCluster(); static volatile boolean SUITE_ABORTED = false; - static ElasticsearchCluster buildCluster() { - var spec = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .nodes(2) - .module("test-esql-heap-attack") - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial"); - String javaVersion = JvmInfo.jvmInfo().version(); - if (javaVersion.equals("20") || javaVersion.equals("21")) { - // see https://github.com/elastic/elasticsearch/issues/99592 - spec.jvmArg("-XX:+UnlockDiagnosticVMOptions -XX:+G1UsePreventiveGC"); - } - return spec.build(); - } - @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); diff --git a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java index d0a146edde765..f23646df081c1 100644 --- a/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java +++ b/test/external-modules/esql-heap-attack/src/main/java/org/elasticsearch/test/esql/heap_attack/RestTriggerOutOfMemoryAction.java @@ -20,12 +20,15 @@ import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import java.util.ArrayList; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +@ServerlessScope(Scope.PUBLIC) public class RestTriggerOutOfMemoryAction extends BaseRestHandler { private static final Logger LOGGER = LogManager.getLogger(RestTriggerOutOfMemoryAction.class); From 
25d586c08c183aa32276d428609c236984aa86c9 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 21 Mar 2024 18:59:27 +0200 Subject: [PATCH 105/214] Make testXContentSerializationWithRolloverAndEffectiveRetention take into consideration a disabled lifecycle (#106612) --- .../cluster/metadata/DataStreamLifecycleTests.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java index d389131e6b294..fc650a5e65909 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java @@ -132,7 +132,11 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws } else { assertThat(serialized, containsString("data_retention")); } - assertThat(serialized, containsString("effective_retention")); + if (lifecycle.isEnabled()) { + assertThat(serialized, containsString("effective_retention")); + } else { + assertThat(serialized, not(containsString("effective_retention"))); + } } } From 7c46c735e4f29fee21d01cebb012e4831e4c7ca4 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Mar 2024 13:01:15 -0400 Subject: [PATCH 106/214] ESQL: Generate docs for ceil (#106616) This replaces the hand maintained docs for `CEIL` and with the docs generated by the tests. There shouldn't be any diff in the generated docs. 
--- docs/reference/esql/functions/ceil.asciidoc | 35 ------------------- .../esql/functions/description/ceil.asciidoc | 2 ++ .../esql/functions/examples/ceil.asciidoc | 13 +++++++ .../esql/functions/layout/ceil.asciidoc | 1 + .../esql/functions/math-functions.asciidoc | 2 +- .../esql/functions/parameters/ceil.asciidoc | 2 +- .../elasticsearch/xpack/esql/CsvAssert.java | 16 ++++++--- .../src/main/resources/meta.csv-spec | 2 +- .../expression/function/FunctionInfo.java | 15 ++++++++ .../expression/function/scalar/math/Ceil.java | 18 ++++++++-- .../function/AbstractFunctionTestCase.java | 10 ++++-- 11 files changed, 69 insertions(+), 47 deletions(-) delete mode 100644 docs/reference/esql/functions/ceil.asciidoc create mode 100644 docs/reference/esql/functions/examples/ceil.asciidoc diff --git a/docs/reference/esql/functions/ceil.asciidoc b/docs/reference/esql/functions/ceil.asciidoc deleted file mode 100644 index ab163138821b1..0000000000000 --- a/docs/reference/esql/functions/ceil.asciidoc +++ /dev/null @@ -1,35 +0,0 @@ -[discrete] -[[esql-ceil]] -=== `CEIL` - -*Syntax* - -[.text-center] -image::esql/functions/signature/ceil.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -Round a number up to the nearest integer. - -NOTE: This is a noop for `long` (including unsigned) and `integer`. - For `double` this picks the closest `double` value to the integer - similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. 
- -include::types/ceil.asciidoc[] - - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=ceil] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=ceil-result] -|=== diff --git a/docs/reference/esql/functions/description/ceil.asciidoc b/docs/reference/esql/functions/description/ceil.asciidoc index db4456896b47b..b39a4c81df95c 100644 --- a/docs/reference/esql/functions/description/ceil.asciidoc +++ b/docs/reference/esql/functions/description/ceil.asciidoc @@ -3,3 +3,5 @@ *Description* Round a number up to the nearest integer. + +NOTE: This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil]. diff --git a/docs/reference/esql/functions/examples/ceil.asciidoc b/docs/reference/esql/functions/examples/ceil.asciidoc new file mode 100644 index 0000000000000..7404eecbbfe0f --- /dev/null +++ b/docs/reference/esql/functions/examples/ceil.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=ceil] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=ceil-result] +|=== + diff --git a/docs/reference/esql/functions/layout/ceil.asciidoc b/docs/reference/esql/functions/layout/ceil.asciidoc index 480aeb759936d..2e49dc848ae59 100644 --- a/docs/reference/esql/functions/layout/ceil.asciidoc +++ b/docs/reference/esql/functions/layout/ceil.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/ceil.svg[Embedded,opts=inline] include::../parameters/ceil.asciidoc[] include::../description/ceil.asciidoc[] include::../types/ceil.asciidoc[] +include::../examples/ceil.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 4e1aa3c6cc757..5faf994d61db6 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -36,7 +36,7 @@ include::layout/acos.asciidoc[] include::layout/asin.asciidoc[] include::layout/atan.asciidoc[] include::layout/atan2.asciidoc[] -include::ceil.asciidoc[] +include::layout/ceil.asciidoc[] include::cos.asciidoc[] include::cosh.asciidoc[] include::e.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc b/docs/reference/esql/functions/parameters/ceil.asciidoc index 9faa6c1adebe2..8527c7f74bb09 100644 --- a/docs/reference/esql/functions/parameters/ceil.asciidoc +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -1,4 +1,4 @@ *Parameters* `number`:: - +Numeric expression. If `null`, the function returns `null`. 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 7d5dc206037cc..0b7e39608b9aa 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -263,10 +263,18 @@ public static void assertData( private static void dataFailure(List dataFailures) { fail("Data mismatch:\n" + dataFailures.stream().map(f -> { Description description = new StringDescription(); - ListMatcher expected = f.expected instanceof List - ? ListMatcher.matchesList().item(f.expected) - : ListMatcher.matchesList((List) f.expected); - List actualList = f.actual instanceof List ? List.of(f.actual) : (List) f.actual; + ListMatcher expected; + if (f.expected instanceof List e) { + expected = ListMatcher.matchesList(e); + } else { + expected = ListMatcher.matchesList().item(f.expected); + } + List actualList; + if (f.actual instanceof List a) { + actualList = a; + } else { + actualList = List.of(f.actual); + } expected.describeMismatch(actualList, description); String prefix = "row " + f.row + " column " + f.column + ":"; return prefix + description.toString().replace("\n", "\n" + prefix); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index c254352431e86..cd94ae793516e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -11,7 +11,7 @@ atan2 |"double atan2(y_coordinate:double|integer|long|unsigne auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date|string, to:integer|long|double|date|string)" |[field, buckets, from, to] 
|["integer|long|double|date", "integer", "integer|long|double|date|string", "integer|long|double|date|string"] |["", "", "", ""] | "double|date" | "Creates human-friendly buckets and returns a datetime value for each row that corresponds to the resulting bucket the row falls into." | [false, false, false, false] | false | false avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false -ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false +ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." 
| [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index ac0d3bea422b0..5c3c9423f32ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -18,10 +18,25 @@ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.CONSTRUCTOR) public @interface FunctionInfo { + /** + * The type(s) this function returns. + */ String[] returnType(); + /** + * The description of the function rendered in {@code META FUNCTIONS} + * and the docs. + */ String description() default ""; + /** + * A {@code NOTE} that's added after the {@link #description} in the docs. + */ + String note() default ""; + + /** + * Is this an aggregation (true) or a scalar function (false). 
+ */ boolean isAggregation() default false; /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 42f8a67ceead1..3ab9b1fc2cb1a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -30,8 +31,21 @@ *

    */ public class Ceil extends UnaryScalarFunction { - @FunctionInfo(returnType = { "double", "integer", "long", "unsigned_long" }, description = "Round a number up to the nearest integer.") - public Ceil(Source source, @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }) Expression n) { + @FunctionInfo( + returnType = { "double", "integer", "long", "unsigned_long" }, + description = "Round a number up to the nearest integer.", + note = "This is a noop for `long` (including unsigned) and `integer`. For `double` this picks the closest `double` value to " + + "the integer similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil].", + examples = @Example(file = "math", tag = "ceil") + ) + public Ceil( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long", "unsigned_long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression n + ) { super(source, n); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index d4aba4bf47902..61d62d6f54344 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1102,8 +1102,9 @@ public static void renderDocs() throws IOException { EsqlFunctionRegistry.FunctionDescription description = EsqlFunctionRegistry.description(definition); renderTypes(description.argNames()); renderParametersList(description.argNames(), description.argDescriptions()); - renderDescription(description.description()); - boolean hasExamples = renderExamples(EsqlFunctionRegistry.functionInfo(definition)); + FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); + 
renderDescription(description.description(), info.note()); + boolean hasExamples = renderExamples(info); renderFullLayout(name, hasExamples); return; } @@ -1155,11 +1156,14 @@ private static void renderParametersList(List argNames, List arg writeToTempDir("parameters", rendered, "asciidoc"); } - private static void renderDescription(String description) throws IOException { + private static void renderDescription(String description, String note) throws IOException { String rendered = DOCS_WARNING + """ *Description* """ + description + "\n"; + if (note != null) { + rendered += "\nNOTE: " + note + "\n"; + } LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); writeToTempDir("description", rendered, "asciidoc"); } From 47b6eb39e5e68fd58a83a9c9f47a72477ec3438e Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 21 Mar 2024 18:12:22 +0100 Subject: [PATCH 107/214] Re-mute SimpleThreadPoolIT.testThreadPoolMetrics (#106623) Mute test until remaining races are addressed. (related to #104652) --- .../java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index 50d5dbdeca71b..fa6126c13c741 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -117,8 +117,7 @@ public void testThreadNames() throws Exception { } } - // temporarily re-enable to gather more data on test failures likely caused by diverging thread pool stats - // at the time stats are collected vs when measurements are taken. 
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104652") public void testThreadPoolMetrics() throws Exception { internalCluster().startNode(); From edefbbc181a060e813d711e78fd9f72286e796d2 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Thu, 21 Mar 2024 17:15:52 +0000 Subject: [PATCH 108/214] Install the stack templates only if DSL available (#106500) Data stream lifecycle has been made available only in 8.11 however we now have a stack template that makes use of it (`.kibana-reporting`). In an 8.10-8.14 mixed cluster environment we could cause a cluster state corruption if we install the `kibana-reporting` (with `lifecycle:{}`) template before all nodes _know_ of data stream lifecycle (8.10 does not). This changes the `StackTemplateRegistry` to wait for all nodes in the cluster to be aware of data stream lifecycle before installing the index templates. --- .../datastreams/DataStreamFeatures.java | 9 +++++++++ x-pack/plugin/stack/build.gradle | 1 + .../xpack/stack/StackTemplateRegistry.java | 13 ++++++++----- .../xpack/stack/StackTemplateRegistryTests.java | 13 ++++++++++--- 4 files changed, 28 insertions(+), 8 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 06dc8919360f8..6e17964aa179a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -8,12 +8,14 @@ package org.elasticsearch.datastreams; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.datastreams.autosharding.DataStreamAutoShardingService; import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.features.FeatureSpecification; 
import org.elasticsearch.features.NodeFeature; +import java.util.Map; import java.util.Set; /** @@ -21,6 +23,13 @@ */ public class DataStreamFeatures implements FeatureSpecification { + public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + + @Override + public Map getHistoricalFeatures() { + return Map.of(DATA_STREAM_LIFECYCLE, Version.V_8_11_0); + } + @Override public Set getFeatures() { return Set.of( diff --git a/x-pack/plugin/stack/build.gradle b/x-pack/plugin/stack/build.gradle index 6b26373c40544..c4b950ad9cb59 100644 --- a/x-pack/plugin/stack/build.gradle +++ b/x-pack/plugin/stack/build.gradle @@ -16,6 +16,7 @@ base { dependencies { compileOnly project(path: xpackModule('core')) + testImplementation project(':modules:data-streams') javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation project(path: ':x-pack:plugin:stack') clusterModules project(':modules:mapper-extras') diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 1eaf224083c87..b21e8c0c15811 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -38,9 +38,13 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(StackTemplateRegistry.class); - // Current version of the registry requires all nodes to be at least 8.9.0. 
+ // Historical node feature kept here as LegacyStackTemplateRegistry is deprecated public static final NodeFeature STACK_TEMPLATES_FEATURE = new NodeFeature("stack.templates_supported"); + // this node feature is a redefinition of {@link DataStreamFeatures#DATA_STREAM_LIFECYCLE} and it's meant to avoid adding a + // dependency to the data-streams module just for this + public static final NodeFeature DATA_STREAM_LIFECYCLE = new NodeFeature("data_stream.lifecycle"); + // The stack template registry version. This number must be incremented when we make changes // to built-in templates. public static final int REGISTRY_VERSION = 8; @@ -326,9 +330,8 @@ protected boolean requiresMasterNode() { @Override protected boolean isClusterReady(ClusterChangedEvent event) { - // Ensure current version of the components are installed only once all nodes are updated to 8.9.0. - // This is necessary to prevent an error caused nby the usage of the ignore_missing_pipeline property - // in the pipeline processor, which has been introduced only in 8.9.0 - return featureService.clusterHasFeature(event.state(), STACK_TEMPLATES_FEATURE); + // Ensure current version of the components are installed only after versions that support data stream lifecycle + // due to .kibana-reporting making use of the feature + return featureService.clusterHasFeature(event.state(), DATA_STREAM_LIFECYCLE); } } diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 2caa820c51645..782fe3b41ae3b 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriFunction; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.DataStreamFeatures; import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; @@ -70,6 +71,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -87,7 +89,7 @@ public void createRegistryAndClient() { threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); clusterService = ClusterServiceUtils.createClusterService(threadPool); - featureService = new FeatureService(List.of(new StackTemplatesFeatures())); + featureService = new FeatureService(List.of(new StackTemplatesFeatures(), new DataStreamFeatures())); registry = new StackTemplateRegistry( Settings.EMPTY, clusterService, @@ -504,7 +506,7 @@ public void testThatMissingMasterNodeDoesNothing() { public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); - DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_8_0); + DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_10_0); DiscoveryNodes nodes = DiscoveryNodes.builder() .localNodeId("updatedNode") .masterNodeId("updatedNode") @@ -513,7 +515,7 @@ public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { .build(); client.setVerifier((a, r, l) -> { - fail("if some cluster mode are not updated to at least v.8.9.0 nothing should happen"); + fail("if some cluster mode are not updated to at least v.8.11.0 nothing should happen"); return null; }); @@ -538,6 +540,11 @@ 
public void testThatTemplatesAreNotDeprecated() { .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } + public void testDataStreamLifecycleNodeFeatureId() { + // let's make sure these ids remain in-sync + assertThat(StackTemplateRegistry.DATA_STREAM_LIFECYCLE.id(), is(DataStreamFeatures.DATA_STREAM_LIFECYCLE.id())); + } + // ------------- /** From 3f8cc1e8e7dc69a82fa30cbe6cab9a220344b2bd Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Mar 2024 11:19:12 -0700 Subject: [PATCH 109/214] Add test sourceset support to MRJAR plugin (#106571) Mrjar plugin supports main sourcesets automatically configured with newer java versions. This commit adds support for test sourcesets as well. The key difference between main and test is that test sourcesets also extend their main counterparts. --- .../gradle/internal/MrjarPlugin.java | 118 ++++++++++++------ 1 file changed, 81 insertions(+), 37 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index adf84b63d8689..9e2f44323f914 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -60,48 +60,30 @@ public void apply(Project project) { project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); - var srcDir = project.getProjectDir().toPath().resolve("src"); - List mainVersions = new ArrayList<>(); - try (var subdirStream = Files.list(srcDir)) { - for (Path sourceset : subdirStream.toList()) { - assert Files.isDirectory(sourceset); - String sourcesetName = sourceset.getFileName().toString(); - Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); - if (sourcesetMatcher.matches()) { - 
mainVersions.add(Integer.parseInt(sourcesetMatcher.group(1))); - } - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - - Collections.sort(mainVersions); - List parentSourceSets = new ArrayList<>(); - parentSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List mainVersions = findSourceVersions(project); + List mainSourceSets = new ArrayList<>(); + mainSourceSets.add(SourceSet.MAIN_SOURCE_SET_NAME); + List testSourceSets = new ArrayList<>(mainSourceSets); + testSourceSets.add(SourceSet.TEST_SOURCE_SET_NAME); for (int javaVersion : mainVersions) { - String sourcesetName = "main" + javaVersion; - addMrjarSourceset(project, javaExtension, sourcesetName, parentSourceSets, javaVersion); - parentSourceSets.add(sourcesetName); + String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; + SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); + configureSourceSetInJar(project, mainSourceSet, javaVersion); + mainSourceSets.add(mainSourceSetName); + testSourceSets.add(mainSourceSetName); + + String testSourceSetName = SourceSet.TEST_SOURCE_SET_NAME + javaVersion; + SourceSet testSourceSet = addSourceSet(project, javaExtension, testSourceSetName, testSourceSets, javaVersion); + testSourceSets.add(testSourceSetName); + createTestTask(project, testSourceSet, javaVersion, mainSourceSets); } - } - private void addMrjarSourceset( - Project project, - JavaPluginExtension javaExtension, - String sourcesetName, - List parentSourceSets, - int javaVersion - ) { - SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourcesetName); - for (String parentSourceSetName : parentSourceSets) { - GradleUtils.extendSourceSet(project, parentSourceSetName, sourcesetName); - } + configureMrjar(project); + } + private void configureMrjar(Project project) { var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); - jarTask.configure(task -> { - 
task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput())); - task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); - }); + jarTask.configure(task -> { task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); }); project.getTasks().withType(Test.class).named(JavaPlugin.TEST_TASK_NAME).configure(testTask -> { testTask.dependsOn(jarTask); @@ -111,6 +93,19 @@ private void addMrjarSourceset( FileCollection testRuntime = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath(); testTask.setClasspath(testRuntime.minus(mainRuntime).plus(project.files(jarTask))); }); + } + + private SourceSet addSourceSet( + Project project, + JavaPluginExtension javaExtension, + String sourceSetName, + List parentSourceSets, + int javaVersion + ) { + SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourceSetName); + for (String parentSourceSetName : parentSourceSets) { + GradleUtils.extendSourceSet(project, parentSourceSetName, sourceSetName); + } project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> { compileTask.getJavaCompiler() @@ -132,6 +127,55 @@ private void addMrjarSourceset( project.getTasks().withType(CheckForbiddenApisTask.class).named(forbiddenApisTaskName).configure(forbiddenApisTask -> { forbiddenApisTask.setIgnoreMissingClasses(true); }); + + return sourceSet; + } + + private void configureSourceSetInJar(Project project, SourceSet sourceSet, int javaVersion) { + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); + jarTask.configure(task -> task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput()))); + } + + private void createTestTask(Project project, SourceSet sourceSet, int javaVersion, List mainSourceSets) { + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); + var 
testTaskProvider = project.getTasks().register(JavaPlugin.TEST_TASK_NAME + javaVersion, Test.class); + testTaskProvider.configure(testTask -> { + testTask.dependsOn(jarTask); + + SourceSetContainer sourceSets = GradleUtils.getJavaSourceSets(project); + FileCollection testRuntime = sourceSet.getRuntimeClasspath(); + for (String mainSourceSetName : mainSourceSets) { + FileCollection mainRuntime = sourceSets.getByName(mainSourceSetName).getOutput(); + testRuntime = testRuntime.minus(mainRuntime); + } + testTask.setClasspath(testRuntime.plus(project.files(jarTask))); + testTask.setTestClassesDirs(sourceSet.getOutput().getClassesDirs()); + + testTask.getJavaLauncher() + .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + }); + + project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); + } + + private static List findSourceVersions(Project project) { + var srcDir = project.getProjectDir().toPath().resolve("src"); + List versions = new ArrayList<>(); + try (var subdirStream = Files.list(srcDir)) { + for (Path sourceSetPath : subdirStream.toList()) { + assert Files.isDirectory(sourceSetPath); + String sourcesetName = sourceSetPath.getFileName().toString(); + Matcher sourcesetMatcher = MRJAR_SOURCESET_PATTERN.matcher(sourcesetName); + if (sourcesetMatcher.matches()) { + versions.add(Integer.parseInt(sourcesetMatcher.group(1))); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + + Collections.sort(versions); + return versions; } private static void stripPreviewFromFiles(Path compileDir) { From e5b604062cb1364fa93cbc28bc17265dd086b47c Mon Sep 17 00:00:00 2001 From: caichangheng Date: Fri, 22 Mar 2024 02:31:50 +0800 Subject: [PATCH 110/214] Check preTags and postTags params for empty values (#106396) Check highlighter params of pre_tags and post_tags params for empty values. If empty throw IllegalArgumentException. 
Closes #69009 --- docs/changelog/106396.yaml | 6 ++ .../test/search.highlight/issue69009.yml | 72 +++++++++++++++++++ .../highlight/AbstractHighlighterBuilder.java | 3 + .../highlight/HighlightBuilderTests.java | 4 +- 4 files changed, 83 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/106396.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml diff --git a/docs/changelog/106396.yaml b/docs/changelog/106396.yaml new file mode 100644 index 0000000000000..7aa06566c75e7 --- /dev/null +++ b/docs/changelog/106396.yaml @@ -0,0 +1,6 @@ +pr: 106396 +summary: "Check preTags and postTags params for empty values" +area: Highlighting +type: bug +issues: + - 69009 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml new file mode 100644 index 0000000000000..8b2f2f90dd0ee --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml @@ -0,0 +1,72 @@ +setup: + - skip: + version: ' - 8.13.99' + reason: 'check of preTags and postTags params for empty values was added in 8.14' + + - do: + indices.create: + index: test + body: + mappings: + "properties": + "text": + "type": "text" + "term_vector": "with_positions_offsets" + + - do: + index: + index: test + id: "1" + body: + "text" : "The quick brown fox is brown." 
+ - do: + indices.refresh: {} + +--- +"Test with empty pre_tags or post_tags in query body with unified highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "fox" } }, + "highlight": { + "type": "unified", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + +--- +"Test with empty pre_tags or post_tags in query body with plain highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "fox" } }, + "highlight": { + "type": "plain", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + +--- +"Test with empty pre_tags or post_tags in query body with fvh highlight type - should fail" : + - do: + catch: /pre_tags or post_tags must not be empty/ + search: + index: test + body: { + "query": { "match": { "fox" } }, + "highlight": { + "type": "fvh", + "fields": { "*": { } }, + "pre_tags": [ ], + "post_tags": [ ] + }, + } + diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index d91b6e8b4e4a3..80cc459569dea 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -643,6 +643,9 @@ static > BiFunction Date: Thu, 21 Mar 2024 14:32:49 -0700 Subject: [PATCH 111/214] Revert "Avoid using small inactive exchange timeout in breaker tests (#106394)" This reverts commit bce11c6c1c1c73a4b9a57b367cc6d379b9acac1b. 
--- .../compute/operator/exchange/ExchangeService.java | 6 +----- .../xpack/esql/action/EsqlActionBreakerIT.java | 4 +++- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 9af08346256f7..a8afce1a3b223 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -25,9 +25,7 @@ import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; @@ -180,15 +178,13 @@ public void messageReceived(OpenExchangeRequest request, TransportChannel channe private class ExchangeTransportAction implements TransportRequestHandler { @Override - public void messageReceived(ExchangeRequest request, TransportChannel channel, Task transportTask) { + public void messageReceived(ExchangeRequest request, TransportChannel channel, Task task) { final String exchangeId = request.exchangeId(); ActionListener listener = new ChannelActionListener<>(channel); final ExchangeSinkHandler sinkHandler = sinks.get(exchangeId); if (sinkHandler == null) { listener.onResponse(new ExchangeResponse(blockFactory, null, true)); } else { - CancellableTask task = (CancellableTask) transportTask; - task.addListener(() -> sinkHandler.onFailure(new TaskCancelledException(task.getReasonCancelled()))); sinkHandler.fetchPageAsync(request.sourcesFinished(), listener); 
} } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 059ed672e56c6..85eb0c02625ad 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.action; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.DocWriteResponse; @@ -34,6 +35,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105543") @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class EsqlActionBreakerIT extends EsqlActionIT { @@ -70,7 +72,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getKey(), HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) - .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueSeconds(between(5, 10))) + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from network can trip the circuit breaker From 2b67444a465ca50a6854c36025fd7f23aac5a117 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 21 Mar 2024 15:02:26 -0700 Subject: [PATCH 112/214] Wait 
indefintely for http connections on shutdown by default (#106511) This commit changes the meaning of the http shutdown timeout when set to 0. Previously the shutdown would proceed immediately, killing active http connections. With this change 0 means no timeout is used, but active connections are still waited on, indefinitely. This new behavior is what was originally intended for the default value of the shutdown timeout setting. --- docs/changelog/106511.yaml | 5 ++ .../common/settings/ClusterSettings.java | 1 + .../http/AbstractHttpServerTransport.java | 34 ++++++++-- .../http/HttpTransportSettings.java | 6 ++ .../AbstractHttpServerTransportTests.java | 63 +++++++++++++++---- 5 files changed, 91 insertions(+), 18 deletions(-) create mode 100644 docs/changelog/106511.yaml diff --git a/docs/changelog/106511.yaml b/docs/changelog/106511.yaml new file mode 100644 index 0000000000000..bdef7f1aea225 --- /dev/null +++ b/docs/changelog/106511.yaml @@ -0,0 +1,5 @@ +pr: 106511 +summary: Wait indefintely for http connections on shutdown by default +area: Infra/Node Lifecycle +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 21801dee844b0..ac5255f58622a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -313,6 +313,7 @@ public void apply(Settings value, Settings current, Settings previous) { HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE, HttpTransportSettings.SETTING_HTTP_MAX_INITIAL_LINE_LENGTH, HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD, + HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD, HttpTransportSettings.SETTING_HTTP_READ_TIMEOUT, HttpTransportSettings.SETTING_HTTP_RESET_COOKIES, HttpTransportSettings.SETTING_HTTP_TCP_NO_DELAY, diff --git 
a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java index f9005f6e37889..3f95aff62313b 100644 --- a/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java +++ b/server/src/main/java/org/elasticsearch/http/AbstractHttpServerTransport.java @@ -70,6 +70,7 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_HOST; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_PORT; import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD; +import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD; public abstract class AbstractHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { private static final Logger logger = LogManager.getLogger(AbstractHttpServerTransport.class); @@ -95,6 +96,7 @@ public abstract class AbstractHttpServerTransport extends AbstractLifecycleCompo private final RefCounted refCounted = AbstractRefCounted.of(() -> allClientsClosedListener.onResponse(null)); private final Set httpServerChannels = ConcurrentCollections.newConcurrentSet(); private final long shutdownGracePeriodMillis; + private final long shutdownPollPeriodMillis; private final HttpClientStatsTracker httpClientStatsTracker; private final HttpTracer httpLogger; @@ -146,6 +148,7 @@ protected AbstractHttpServerTransport( slowLogThresholdMs = TransportSettings.SLOW_OPERATION_THRESHOLD_SETTING.get(settings).getMillis(); httpClientStatsTracker = new HttpClientStatsTracker(settings, clusterSettings, threadPool); shutdownGracePeriodMillis = SETTING_HTTP_SERVER_SHUTDOWN_GRACE_PERIOD.get(settings).getMillis(); + shutdownPollPeriodMillis = SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD.get(settings).getMillis(); } public Recycler recycler() { @@ -272,17 +275,36 @@ protected void doStop() { boolean closed = false; 
+ long pollTimeMillis = shutdownPollPeriodMillis; if (shutdownGracePeriodMillis > 0) { + if (shutdownGracePeriodMillis < pollTimeMillis) { + pollTimeMillis = shutdownGracePeriodMillis; + } + logger.debug(format("waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + } else { + logger.debug("waiting indefinitely for clients to close connections"); + } + + long startPollTimeMillis = System.currentTimeMillis(); + do { try { - logger.debug(format("waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); - FutureUtils.get(allClientsClosedListener, shutdownGracePeriodMillis, TimeUnit.MILLISECONDS); + FutureUtils.get(allClientsClosedListener, pollTimeMillis, TimeUnit.MILLISECONDS); closed = true; } catch (ElasticsearchTimeoutException t) { - logger.warn(format("timed out while waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + logger.info(format("still waiting on %d client connections to close", httpChannels.size())); + if (shutdownGracePeriodMillis > 0) { + long endPollTimeMillis = System.currentTimeMillis(); + long remainingGracePeriodMillis = shutdownGracePeriodMillis - (endPollTimeMillis - startPollTimeMillis); + if (remainingGracePeriodMillis <= 0) { + logger.warn(format("timed out while waiting [%d]ms for clients to close connections", shutdownGracePeriodMillis)); + break; + } else if (remainingGracePeriodMillis < pollTimeMillis) { + pollTimeMillis = remainingGracePeriodMillis; + } + } } - } else { - logger.debug("closing all client connections immediately"); - } + } while (closed == false); + if (closed == false) { try { CloseableChannel.closeChannels(new ArrayList<>(httpChannels.values()), true); diff --git a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java index 737a99d536919..dcceb43b63db8 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java +++ 
b/server/src/main/java/org/elasticsearch/http/HttpTransportSettings.java @@ -132,6 +132,12 @@ public final class HttpTransportSettings { Setting.Property.NodeScope ); + public static final Setting SETTING_HTTP_SERVER_SHUTDOWN_POLL_PERIOD = Setting.positiveTimeSetting( + "http.shutdown_poll_period", + TimeValue.timeValueMinutes(5), + Setting.Property.NodeScope + ); + // don't reset cookies by default, since I don't think we really need to // note, parsing cookies was fixed in netty 3.5.1 regarding stack allocation, but still, currently, we don't need cookies public static final Setting SETTING_HTTP_RESET_COOKIES = Setting.boolSetting("http.reset_cookies", false, Property.NodeScope); diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 7f5c623dbae08..06d05f9dc06fa 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -918,8 +918,8 @@ protected void stopInternal() {} } } - public void testStopDoesntWaitIfGraceIsZero() { - try (var noWait = LogExpectation.unexpectWait(); var transport = new TestHttpServerTransport(Settings.EMPTY)) { + public void testStopWaitsIndefinitelyIfGraceIsZero() { + try (var wait = LogExpectation.expectWait(); var transport = new TestHttpServerTransport(Settings.EMPTY)) { TestHttpChannel httpChannel = new TestHttpChannel(); transport.serverAcceptedChannel(httpChannel); transport.incomingRequest(testHttpRequest(), httpChannel); @@ -927,7 +927,33 @@ public void testStopDoesntWaitIfGraceIsZero() { transport.doStop(); assertFalse(transport.testHttpServerChannel.isOpen()); assertFalse(httpChannel.isOpen()); - noWait.assertExpectationsMatched(); + wait.assertExpectationsMatched(); + } + } + + public void testStopLogsProgress() throws Exception { + TestHttpChannel httpChannel = new 
TestHttpChannel(); + var doneWithRequest = new CountDownLatch(1); + try (var wait = LogExpectation.expectUpdate(1); var transport = new TestHttpServerTransport(gracePeriod(SHORT_GRACE_PERIOD_MS))) { + + httpChannel.blockSendResponse(); + var inResponse = httpChannel.notifyInSendResponse(); + + transport.serverAcceptedChannel(httpChannel); + new Thread(() -> { + transport.incomingRequest(testHttpRequest(), httpChannel); + doneWithRequest.countDown(); + }, "testStopLogsProgress -> incomingRequest").start(); + + inResponse.await(); + + transport.doStop(); + assertFalse(transport.testHttpServerChannel.isOpen()); + assertFalse(httpChannel.isOpen()); + wait.assertExpectationsMatched(); + } finally { + httpChannel.allowSendResponse(); + doneWithRequest.await(); } } @@ -1345,20 +1371,24 @@ private LogExpectation(int grace) { } public static LogExpectation expectTimeout(int grace) { - return new LogExpectation(grace).timedOut(true).wait(true); + return new LogExpectation(grace).timedOut(true).wait(false); } public static LogExpectation unexpectedTimeout(int grace) { - return new LogExpectation(grace).timedOut(false).wait(true); + return new LogExpectation(grace).timedOut(false).wait(false); + } + + public static LogExpectation expectWait() { + return new LogExpectation(0).wait(true); } - public static LogExpectation unexpectWait() { - return new LogExpectation(0).wait(false); + public static LogExpectation expectUpdate(int connections) { + return new LogExpectation(0).update(connections); } private LogExpectation timedOut(boolean expected) { var message = "timed out while waiting [" + grace + "]ms for clients to close connections"; - var name = "message"; + var name = "timed out message"; var logger = AbstractHttpServerTransport.class.getName(); var level = Level.WARN; if (expected) { @@ -1370,18 +1400,27 @@ private LogExpectation timedOut(boolean expected) { } private LogExpectation wait(boolean expected) { - var message = "closing all client connections immediately"; - 
var name = "message"; + var message = "waiting indefinitely for clients to close connections"; + var name = "wait message"; var logger = AbstractHttpServerTransport.class.getName(); var level = Level.DEBUG; if (expected) { - appender.addExpectation(new MockLogAppender.UnseenEventExpectation(name, logger, level, message)); - } else { appender.addExpectation(new MockLogAppender.SeenEventExpectation(name, logger, level, message)); + } else { + appender.addExpectation(new MockLogAppender.UnseenEventExpectation(name, logger, level, message)); } return this; } + private LogExpectation update(int connections) { + var message = "still waiting on " + connections + " client connections to close"; + var name = "update message"; + var logger = AbstractHttpServerTransport.class.getName(); + var level = Level.INFO; + appender.addExpectation(new MockLogAppender.SeenEventExpectation(name, logger, level, message)); + return this; + } + public void assertExpectationsMatched() { appender.assertAllExpectationsMatched(); checked = true; From ff91ce024c31cacc6ea420a5a5e74a35b610b512 Mon Sep 17 00:00:00 2001 From: Athena Brown Date: Thu, 21 Mar 2024 17:17:25 -0600 Subject: [PATCH 113/214] Update and pin gradle wrapper validation action (#106635) This commit updates the Gradle wrapper validation action to the latest release and pins it to that version by hash per our company best practices for GitHub Workflows & Actions. 
--- .github/workflows/gradle-wrapper-validation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/gradle-wrapper-validation.yml b/.github/workflows/gradle-wrapper-validation.yml index 39bef5e91f954..46762905e2c9a 100644 --- a/.github/workflows/gradle-wrapper-validation.yml +++ b/.github/workflows/gradle-wrapper-validation.yml @@ -10,4 +10,4 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@699bb18358f12c5b78b37bb0111d3a0e2276e0e2 # Release v2.1.1 From a960befd28cb17a21bd5c671c8a1b5d7d4fb656d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 22 Mar 2024 00:52:44 +0100 Subject: [PATCH 114/214] Adding `resolveConcreteIndexNames` to DataStreamsActionUtil (#106621) Extract method `resolveConcreteIndexNames` from `DataStreamsStatsTransportAction` to `DataStreamsActionUtil`, and move the whole class to the `server` module, so it can be shared and used across plugins. 
--- .../DataStreamsStatsTransportAction.java | 25 +--- .../DeleteDataStreamTransportAction.java | 2 +- .../action/GetDataStreamsTransportAction.java | 1 + ...nsportDeleteDataStreamLifecycleAction.java | 2 +- ...TransportGetDataStreamLifecycleAction.java | 2 +- ...TransportPutDataStreamLifecycleAction.java | 2 +- .../datastreams}/DataStreamsActionUtil.java | 29 ++++- .../DataStreamsActionUtilTests.java | 107 ++++++++++++++++++ 8 files changed, 146 insertions(+), 24 deletions(-) rename {modules/data-streams/src/main/java/org/elasticsearch/datastreams/action => server/src/main/java/org/elasticsearch/action/datastreams}/DataStreamsActionUtil.java (58%) create mode 100644 server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java index 2aa5c07ad6be5..c7d70fa06162d 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PointValues; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.datastreams.DataStreamsStatsAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; @@ -44,7 +45,6 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; -import java.util.stream.Stream; public class DataStreamsStatsTransportAction extends TransportBroadcastByNodeAction< DataStreamsStatsAction.Request, @@ -104,25 +104,12 @@ protected ClusterBlockException 
checkRequestBlock( @Override protected String[] resolveConcreteIndexNames(ClusterState clusterState, DataStreamsStatsAction.Request request) { - List abstractionNames = indexNameExpressionResolver.dataStreamNames( + return DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, clusterState, - request.indicesOptions(), - request.indices() - ); - SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); - - String[] concreteDatastreamIndices = abstractionNames.stream().flatMap(abstractionName -> { - IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName); - assert indexAbstraction != null; - if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { - DataStream dataStream = (DataStream) indexAbstraction; - List indices = dataStream.getIndices(); - return indices.stream().map(Index::getName); - } else { - return Stream.empty(); - } - }).toArray(String[]::new); - return concreteDatastreamIndices; + request.indices(), + request.indicesOptions() + ).toArray(String[]::new); } @Override diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index 6e7528c470d49..c3e8331b856fd 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -44,7 +44,7 @@ import java.util.Set; import java.util.function.Consumer; -import static org.elasticsearch.datastreams.action.DataStreamsActionUtil.getDataStreamNames; +import static org.elasticsearch.action.datastreams.DataStreamsActionUtil.getDataStreamNames; public class DeleteDataStreamTransportAction extends AcknowledgedTransportMasterNodeAction { diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index 2b1d4ae01f565..41e62508cafbb 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.datastreams.GetDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.IndexProperties; import org.elasticsearch.action.datastreams.GetDataStreamAction.Response.ManagedBy; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java index 0381014aed24b..9683588bdcae3 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportDeleteDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -19,7 +20,6 @@ import org.elasticsearch.cluster.service.ClusterService; 
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java index 84144cdcb0379..3a3a54d747920 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportGetDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; @@ -21,7 +22,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java index 31d7237eeb681..7a33d3011c621 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/TransportPutDataStreamLifecycleAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams.lifecycle.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -19,7 +20,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.datastreams.action.DataStreamsActionUtil; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java similarity index 58% rename from modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java rename to server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java index a05de46d0e12a..e9ff74459c153 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsActionUtil.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DataStreamsActionUtil.java @@ -6,13 +6,18 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.datastreams.action; +package org.elasticsearch.action.datastreams; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.index.Index; import java.util.List; +import java.util.SortedMap; +import java.util.stream.Stream; public class DataStreamsActionUtil { @@ -40,4 +45,26 @@ public static IndicesOptions updateIndicesOptions(IndicesOptions indicesOptions) } return indicesOptions; } + + public static Stream resolveConcreteIndexNames( + IndexNameExpressionResolver indexNameExpressionResolver, + ClusterState clusterState, + String[] names, + IndicesOptions indicesOptions + ) { + List abstractionNames = getDataStreamNames(indexNameExpressionResolver, clusterState, names, indicesOptions); + SortedMap indicesLookup = clusterState.getMetadata().getIndicesLookup(); + + return abstractionNames.stream().flatMap(abstractionName -> { + IndexAbstraction indexAbstraction = indicesLookup.get(abstractionName); + assert indexAbstraction != null; + if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) { + DataStream dataStream = (DataStream) indexAbstraction; + List indices = dataStream.getIndices(); + return indices.stream().map(Index::getName); + } else { + return Stream.empty(); + } + }); + } } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java new file mode 100644 index 0000000000000..81c8abed352ab --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamMetadata; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DataStreamsActionUtilTests extends ESTestCase { + + public void testDataStreamsResolveConcreteIndexNames() { + + var index1 = new Index("foo1", IndexMetadata.INDEX_UUID_NA_VALUE); + var index3 = new Index("bar", IndexMetadata.INDEX_UUID_NA_VALUE); + + var dataStreamIndex1 = new Index(".ds-foo1", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex2 = new Index(".ds-bar2", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex3 = new Index(".ds-foo2", IndexMetadata.INDEX_UUID_NA_VALUE); + var dataStreamIndex4 = new 
Index(".ds-baz1", IndexMetadata.INDEX_UUID_NA_VALUE); + + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) + .metadata( + Metadata.builder() + .putCustom( + DataStreamMetadata.TYPE, + new DataStreamMetadata( + ImmutableOpenMap.builder() + .fPut("fooDs", DataStreamTestHelper.newInstance("fooDs", List.of(dataStreamIndex1))) + .fPut("barDs", DataStreamTestHelper.newInstance("barDs", List.of(dataStreamIndex2))) + .fPut("foo2Ds", DataStreamTestHelper.newInstance("foo2Ds", List.of(dataStreamIndex3))) + .fPut("bazDs", DataStreamTestHelper.newInstance("bazDs", List.of(dataStreamIndex4))) + .build(), + ImmutableOpenMap.of() + ) + ) + .indices( + createLocalOnlyIndicesMetadata( + index1, + index3, + dataStreamIndex1, + dataStreamIndex2, + dataStreamIndex3, + dataStreamIndex4 + ) + ) + .build() + ) + .build(); + + var query = new String[] { "foo*", "baz*" }; + var indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); + when(indexNameExpressionResolver.dataStreamNames(any(), any(), eq(query))).thenReturn(List.of("fooDs", "foo2Ds", "bazDs")); + + var resolved = DataStreamsActionUtil.resolveConcreteIndexNames( + indexNameExpressionResolver, + clusterState, + query, + IndicesOptions.builder().wildcardOptions(IndicesOptions.WildcardOptions.builder().includeHidden(true)).build() + ).toList(); + + assertThat(resolved, containsInAnyOrder(".ds-foo1", ".ds-foo2", ".ds-baz1")); + } + + private Map createLocalOnlyIndicesMetadata(Index... 
indices) { + return Arrays.stream(indices) + .map( + index1 -> Map.entry( + index1.getName(), + IndexMetadata.builder(index1.getName()) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())) + .numberOfReplicas(0) + .numberOfShards(1) + .build() + ) + ) + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + +} From 12e64a1bddac15b49655b79e73347256e371e4fe Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 22 Mar 2024 09:11:30 +0100 Subject: [PATCH 115/214] Use historical features for Yaml REST tests for cluster apis (#106587) --- .../test/cat.indices/10_basic.yml | 4 +- .../test/cat.templates/10_basic.yml | 16 ++-- .../cluster.allocation_explain/10_basic.yml | 10 +-- .../cluster.component_template/10_basic.yml | 16 ++-- .../test/cluster.desired_balance/10_basic.yml | 36 ++++----- .../test/cluster.desired_nodes/20_dry_run.yml | 10 +-- .../test/cluster.health/10_basic.yml | 11 +-- .../cluster.health/30_indices_options.yml | 4 +- .../test/cluster.info/10_info_all.yml | 4 +- .../test/cluster.info/20_info_http.yml | 4 +- .../test/cluster.info/30_info_thread_pool.yml | 4 +- .../test/cluster.info/40_info_script.yml | 4 +- .../10_basic.yml | 18 ++--- .../test/cluster.stats/10_basic.yml | 24 +++--- .../cluster.stats/20_indexing_pressure.yml | 4 +- .../rest/yaml/YamlTestLegacyFeatures.java | 79 +++++++++++++++++-- 16 files changed, 158 insertions(+), 90 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml index 2d006f3425790..981a934a719ca 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.indices/10_basic.yml @@ -72,7 +72,7 @@ "Test cat indices output for closed index (pre 7.2.0)": - skip: reason: "closed indices are replicated starting 
version 7.2.0" - cluster_features: ["cat_indices_replicate_closed"] + cluster_features: ["indices_replicate_closed"] - requires: test_runner_features: ["allowed_warnings"] @@ -117,7 +117,7 @@ "Test cat indices output for closed index": - skip: reason: "closed indices are replicated starting version 7.2.0" - cluster_features: ["cat_indices_replicate_closed"] + cluster_features: ["indices_replicate_closed"] - requires: test_runner_features: ["allowed_warnings"] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml index f6f20913e402b..5270d215f8cea 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -1,7 +1,7 @@ --- "Help": - requires: - cluster_features: ["cat_templates_v2"] + cluster_features: ["templates_v2"] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -32,7 +32,7 @@ --- "Normal templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -83,7 +83,7 @@ --- "Filtered templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -125,7 +125,7 @@ --- "Column headers": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -163,7 +163,7 @@ --- "Select columns": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" - do: @@ -197,7 +197,7 @@ --- "Sort templates": - requires: - cluster_features: [ "cat_templates_v2" ] + 
cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -250,7 +250,7 @@ --- "Multiple template": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: default_shards, no_xpack @@ -286,7 +286,7 @@ --- "Mixture of legacy and composable templates": - requires: - cluster_features: [ "cat_templates_v2" ] + cluster_features: [ "templates_v2" ] reason: "format changed in 7.8 to accomodate V2 index templates" test_runner_features: allowed_warnings diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml index 1f0e2b6fd727c..8c350b50a6bf2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yml @@ -49,10 +49,10 @@ --- "Cluster shard allocation explanation test with a closed index": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["indices_replicate_closed"] reason: closed indices are replicated starting version 7.2.0 - features: ["allowed_warnings"] + test_runner_features: ["allowed_warnings"] - do: indices.create: @@ -95,8 +95,8 @@ --- "Cluster allocation explanation response includes node's roles": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["cluster_allocation_role"] reason: The roles field was introduced in 8.11.0 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml index d27abc3da7081..0308a68dae2cd 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.component_template/10_basic.yml @@ -1,7 +1,7 @@ --- "Basic CRUD": - - skip: - version: " - 7.7.99" + - requires: + cluster_features: ["templates_v2"] reason: "index/component template v2 API unavailable before 7.8" - do: @@ -48,8 +48,8 @@ --- "Delete multiple templates": - - skip: - version: " - 7.99.99" + - requires: + cluster_features: ["cluster_templates_delete_multiple"] reason: "not yet backported" - do: @@ -116,8 +116,8 @@ --- "Add data stream lifecycle": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["datastream_lifecycle"] reason: "Data stream lifecycle was available from 8.11" - do: @@ -144,8 +144,8 @@ --- "Get data stream lifecycle with default rollover": - - skip: - version: " - 8.10.99" + - requires: + cluster_features: ["datastream_lifecycle"] reason: "Data stream lifecycle was available from 8.11" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml index a4204034bfd80..cd213ebe72a8e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_balance/10_basic.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: " - 8.5.99" + - requires: + cluster_features: ["cluster_desired_balance"] reason: "API added in in 8.6.0" --- @@ -61,8 +61,8 @@ setup: --- "Test cluster_balance_stats": - - skip: - version: " - 8.6.99" + - requires: + cluster_features: ["cluster_desired_balance_stats"] reason: "cluster_balance_stats added in in 8.7.0" - do: @@ -109,8 +109,8 @@ setup: --- "Test cluster_info": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_info"] 
reason: "cluster_info added in in 8.8.0" - do: @@ -121,8 +121,8 @@ setup: --- "Test cluster_balance_stats contains node ID and roles": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "node_id and roles added in in 8.8.0" - do: @@ -140,8 +140,8 @@ setup: --- "Test tier_preference": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "tier_preference added in in 8.8.0" - do: @@ -165,8 +165,8 @@ setup: --- "Test computed_shard_movements": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "computed_shard_movements added in in 8.8.0" - do: @@ -177,8 +177,8 @@ setup: --- "Test reset desired balance": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_desired_balance_extended"] reason: "reset API added in in 8.8.0" - do: @@ -187,8 +187,8 @@ setup: --- "Test undesired_shard_allocation_count": - - skip: - version: " - 8.11.99" + - requires: + cluster_features: ["cluster_desired_balance_stats_undesired_count"] reason: "undesired_shard_allocation_count added in in 8.12.0" - do: @@ -225,8 +225,8 @@ setup: --- "Test unassigned_shards, total_allocations, undesired_allocations and undesired_allocations_fraction": - - skip: - version: " - 8.11.99" + - requires: + cluster_features: [ "cluster_desired_balance_stats_undesired_count" ] reason: "undesired_shard_allocation_count added in in 8.12.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml index 3723c5b2165ca..56a693febec82 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.desired_nodes/20_dry_run.yml @@ -1,7 +1,7 @@ --- setup: 
- - skip: - version: " - 8.3.99" + - requires: + cluster_features: ["cluster_desired_nodes_dry_run"] reason: "Support for the dry run option was added in in 8.4.0" --- teardown: @@ -12,7 +12,7 @@ teardown: "Test dry run doesn't update empty desired nodes": - skip: version: " - 8.12.99" - reason: "version_node removed from version 8.13 onwards" + reason: "version_node removed from version 8.13 onwards, require the new api" - do: cluster.state: {} @@ -42,7 +42,7 @@ teardown: "Test dry run doesn't update existing desired nodes": - skip: version: " - 8.12.99" - reason: "version_node removed from version 8.13 onwards" + reason: "version_node removed from version 8.13 onwards, require the new api" - do: cluster.state: {} @@ -94,7 +94,7 @@ teardown: --- "Test validation works for dry run updates": - skip: - version: "8.9.99 - " + cluster_features: ["cluster_desired_nodes_no_settings_validation"] reason: "We started skipping setting validations in 8.10" - do: cluster.state: { } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml index a051b3626b217..586bd3b7cfb6b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/10_basic.yml @@ -132,9 +132,10 @@ --- "cluster health with closed index (pre 7.2.0)": - skip: - version: "7.2.0 - " + cluster_features: ["indices_replicate_closed"] reason: "closed indices are replicated starting version 7.2.0" - features: ["allowed_warnings"] + - requires: + test_runner_features: ["allowed_warnings"] - do: indices.create: @@ -208,10 +209,10 @@ --- "cluster health with closed index": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["indices_replicate_closed"] reason: "closed indices are replicated starting version 7.2.0" - features: ["allowed_warnings", 
"default_shards"] + test_runner_features: ["allowed_warnings", "default_shards"] - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml index 8144a2c1a1dbf..5caa0ebad30b2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.health/30_indices_options.yml @@ -34,8 +34,8 @@ setup: --- "cluster health with expand_wildcards": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["cluster_health_indices_options"] reason: "indices options has been introduced in cluster health request starting version 7.2.0" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml index 4e88c14a2b1dc..023e35cd4bee1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/10_info_all.yml @@ -1,7 +1,7 @@ --- setup: - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/_all only available from v8.9" --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml index f238e5116e146..ff7d982b14fee 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/20_info_http.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] 
reason: "/_info/http only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml index 35a538ac3a44a..261f1d8ea56cb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/30_info_thread_pool.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/thread_pool only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml index 8c4b3e2832daf..bb7597537014e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.info/40_info_script.yml @@ -1,7 +1,7 @@ --- "Cluster HTTP Info": - - skip: - version: " - 8.8.99" + - requires: + cluster_features: ["cluster_info_extended"] reason: "/_info/script only available from v8.9" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml index 740836efcdc46..fda715e416ac2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.prevalidate_node_removal/10_basic.yml @@ -1,8 +1,8 @@ --- "Prevalidation basic test": - - skip: - features: contains - version: "- 8.6.99" + - requires: + test_runner_features: ["contains"] + cluster_features: 
["cluster_prevalidate_node_removal_reason"] reason: "The reason field was introduced in 8.7.0" # Fetch a node ID and stash it in node_id @@ -19,8 +19,8 @@ - contains: {nodes: {id: "$node_id", result: {is_safe: true, reason: no_problems, message: ""}}} --- "Prevalidation with no node specified": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -31,8 +31,8 @@ - match: { error.reason: "Validation Failed: 1: request must contain one of the parameters 'names', 'ids', or 'external_ids';" } --- "Prevalidation with more than one query parameter": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: bad_request @@ -45,8 +45,8 @@ - match: { error.reason: "Validation Failed: 1: request must contain only one of the parameters 'names', 'ids', or 'external_ids';" } --- "Prevalidation with non-existing node": - - skip: - version: "- 8.5.99" + - requires: + cluster_features: ["cluster_prevalidate_node_removal"] reason: "API added in 8.6.0" - do: catch: missing diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml index a46d9a67a863d..2c48282332909 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_basic.yml @@ -75,8 +75,8 @@ --- "get cluster stats returns packaging types": - - skip: - version: " - 7.1.99" + - requires: + cluster_features: ["cluster_stats_packaging_types"] reason: "packaging types are added for v7.2.0" - do: @@ -85,8 +85,8 @@ - is_true: nodes.packaging_types --- "get cluster stats without runtime fields": - - skip: - version: " - 7.12.99" + - requires: + cluster_features: ["cluster_stats_runtime_fields"] 
reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -98,8 +98,8 @@ --- "Usage stats with script-less runtime fields": - - skip: - version: " - 7.12.99" + - requires: + cluster_features: ["cluster_stats_runtime_fields"] reason: "cluster stats includes runtime fields from 7.13 on" - do: indices.create: @@ -168,8 +168,8 @@ --- "mappings sizes reported in get cluster stats": - - skip: - version: " - 8.3.99" + - requires: + cluster_features: ["cluster_stats_mapping_sizes"] reason: "mapping sizes reported from 8.4 onwards" - do: indices.create: @@ -188,8 +188,8 @@ --- "snapshot stats reported in get cluster stats": - - skip: - version: " - 8.7.99" + - requires: + cluster_features: ["cluster_stats_snapshots"] reason: "snapshot stats reported from 8.8 onwards" - do: @@ -231,8 +231,8 @@ --- "Dense vector stats": - - skip: - version: " - 8.9.99" + - requires: + cluster_features: ["cluster_stats_dense_vectors"] reason: "dense vector stats added in 8.10" - do: indices.create: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml index 9178fab25e230..648964d9e721f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/20_indexing_pressure.yml @@ -1,7 +1,7 @@ --- "Indexing pressure cluster stats": - - skip: - version: " - 8.0.99" + - requires: + cluster_features: ["cluster_stats_indexing_pressure"] reason: "indexing_pressure in cluster was added in 8.1" - do: diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java index 326afdaa7ae1a..2ce35888c3f14 100644 --- 
a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/YamlTestLegacyFeatures.java @@ -19,7 +19,6 @@ * to support BwC. Rather than leaving them in the main src we group them here, so it's clear they are not used in production code anymore. */ public class YamlTestLegacyFeatures implements FeatureSpecification { - private static final NodeFeature BULK_AUTO_ID = new NodeFeature("bulk_auto_id"); private static final NodeFeature BULK_REQUIRE_ALIAS = new NodeFeature("bulk_require_alias"); private static final NodeFeature BULK_DYNAMIC_TEMPLATE_OP_TYPE = new NodeFeature("bulk_dynamic_template_op_type"); @@ -36,7 +35,6 @@ public class YamlTestLegacyFeatures implements FeatureSpecification { private static final NodeFeature CAT_ALLOCATION_NODE_ROLE = new NodeFeature("cat_allocation_node_role"); private static final NodeFeature CAT_INDICES_DATASET_SIZE = new NodeFeature("cat_indices_dataset_size"); - private static final NodeFeature CAT_INDICES_REPLICATE_CLOSED = new NodeFeature("cat_indices_replicate_closed"); private static final NodeFeature CAT_INDICES_VALIDATE_HEALTH_PARAM = new NodeFeature("cat_indices_validate_health_param"); private static final NodeFeature CAT_PLUGINS_NEW_FORMAT = new NodeFeature("cat_plugins_new_format"); @@ -48,9 +46,47 @@ public class YamlTestLegacyFeatures implements FeatureSpecification { private static final NodeFeature CAT_TASKS_X_OPAQUE_ID = new NodeFeature("cat_tasks_x_opaque_id"); - private static final NodeFeature CAT_TEMPLATES_V2 = new NodeFeature("cat_templates_v2"); private static final NodeFeature CAT_TEMPLATE_NAME_VALIDATION = new NodeFeature("cat_template_name_validation"); + private static final NodeFeature CLUSTER_TEMPLATES_DELETE_MULTIPLE = new NodeFeature("cluster_templates_delete_multiple"); + + private static final NodeFeature CLUSTER_ALLOCATION_ROLE = new NodeFeature("cluster_allocation_role"); + + private 
static final NodeFeature CLUSTER_DESIRED_BALANCE = new NodeFeature("cluster_desired_balance"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS = new NodeFeature("cluster_desired_balance_stats"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_EXTENDED = new NodeFeature("cluster_desired_balance_extended"); + private static final NodeFeature CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT = new NodeFeature( + "cluster_desired_balance_stats_undesired_count" + ); + + private static final NodeFeature CLUSTER_DESIRED_NODES_OLD = new NodeFeature("cluster_desired_nodes_old"); + private static final NodeFeature CLUSTER_DESIRED_NODES_DRY_RUN = new NodeFeature("cluster_desired_nodes_dry_run"); + private static final NodeFeature CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION = new NodeFeature( + "cluster_desired_nodes_no_settings_validation" + ); + private static final NodeFeature CLUSTER_DESIRED_NODES = new NodeFeature("cluster_desired_nodes"); + + private static final NodeFeature CLUSTER_HEALTH_INDICES_OPTIONS = new NodeFeature("cluster_health_indices_options"); + + private static final NodeFeature CLUSTER_INFO = new NodeFeature("cluster_info"); + private static final NodeFeature CLUSTER_INFO_EXTENDED = new NodeFeature("cluster_info_extended"); + + private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL = new NodeFeature("cluster_prevalidate_node_removal"); + private static final NodeFeature CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON = new NodeFeature("cluster_prevalidate_node_removal_reason"); + + private static final NodeFeature CLUSTER_STATS_PACKAGING_TYPES = new NodeFeature("cluster_stats_packaging_types"); + private static final NodeFeature CLUSTER_STATS_RUNTIME_FIELDS = new NodeFeature("cluster_stats_runtime_fields"); + private static final NodeFeature CLUSTER_STATS_INDEXING_PRESSURE = new NodeFeature("cluster_stats_indexing_pressure"); + private static final NodeFeature CLUSTER_STATS_MAPPING_SIZES = new 
NodeFeature("cluster_stats_mapping_sizes"); + private static final NodeFeature CLUSTER_STATS_SNAPSHOTS = new NodeFeature("cluster_stats_snapshots"); + private static final NodeFeature CLUSTER_STATS_DENSE_VECTORS = new NodeFeature("cluster_stats_dense_vectors"); + + private static final NodeFeature DATASTREAM_LIFECYCLE = new NodeFeature("datastream_lifecycle"); + + private static final NodeFeature TEMPLATES_V2 = new NodeFeature("templates_v2"); + + private static final NodeFeature INDICES_REPLICATE_CLOSED = new NodeFeature("indices_replicate_closed"); + @Override public Map getHistoricalFeatures() { return Map.ofEntries( @@ -66,7 +102,6 @@ public Map getHistoricalFeatures() { Map.entry(CAT_ALLOCATION_NODE_ROLE, Version.V_8_10_0), - Map.entry(CAT_INDICES_REPLICATE_CLOSED, Version.V_7_2_0), Map.entry(CAT_INDICES_VALIDATE_HEALTH_PARAM, Version.V_7_8_0), Map.entry(CAT_INDICES_DATASET_SIZE, Version.V_8_11_0), @@ -79,8 +114,40 @@ public Map getHistoricalFeatures() { Map.entry(CAT_TASKS_X_OPAQUE_ID, Version.V_7_10_0), - Map.entry(CAT_TEMPLATES_V2, Version.V_7_8_0), - Map.entry(CAT_TEMPLATE_NAME_VALIDATION, Version.V_7_16_0) + Map.entry(CAT_TEMPLATE_NAME_VALIDATION, Version.V_7_16_0), + + Map.entry(CLUSTER_TEMPLATES_DELETE_MULTIPLE, Version.V_8_0_0), + Map.entry(CLUSTER_ALLOCATION_ROLE, Version.V_8_11_0), + + Map.entry(CLUSTER_DESIRED_BALANCE, Version.V_8_6_0), + Map.entry(CLUSTER_DESIRED_BALANCE_STATS, Version.V_8_7_0), + Map.entry(CLUSTER_DESIRED_BALANCE_EXTENDED, Version.V_8_8_0), + Map.entry(CLUSTER_DESIRED_BALANCE_STATS_UNDESIRED_COUNT, Version.V_8_12_0), + + Map.entry(CLUSTER_DESIRED_NODES_OLD, Version.V_8_3_0), + Map.entry(CLUSTER_DESIRED_NODES_DRY_RUN, Version.V_8_4_0), + Map.entry(CLUSTER_DESIRED_NODES_NO_SETTINGS_VALIDATION, Version.V_8_10_0), + + Map.entry(CLUSTER_HEALTH_INDICES_OPTIONS, Version.V_7_2_0), + + Map.entry(CLUSTER_INFO, Version.V_8_8_0), + Map.entry(CLUSTER_INFO_EXTENDED, Version.V_8_9_0), + + Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL, 
Version.V_8_6_0), + Map.entry(CLUSTER_PREVALIDATE_NODE_REMOVAL_REASON, Version.V_8_7_0), + + Map.entry(CLUSTER_STATS_PACKAGING_TYPES, Version.V_7_2_0), + Map.entry(CLUSTER_STATS_RUNTIME_FIELDS, Version.V_7_13_0), + Map.entry(CLUSTER_STATS_INDEXING_PRESSURE, Version.V_8_1_0), + Map.entry(CLUSTER_STATS_MAPPING_SIZES, Version.V_8_4_0), + Map.entry(CLUSTER_STATS_SNAPSHOTS, Version.V_8_8_0), + Map.entry(CLUSTER_STATS_DENSE_VECTORS, Version.V_8_10_0), + + Map.entry(DATASTREAM_LIFECYCLE, Version.V_8_11_0), + + Map.entry(INDICES_REPLICATE_CLOSED, Version.V_7_2_0), + + Map.entry(TEMPLATES_V2, Version.V_7_8_0) ); } } From 1fe4946f81fcc08c23ceccaf94bfacd36846b17e Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Mar 2024 08:18:48 +0000 Subject: [PATCH 116/214] Fix testScheduledFixedDelayRejection (#106630) Not really necessary to allow the scheduled task to race against the blocks, and this race is a source of test flakiness. Fixed by imposing the blocks first. Closes #106618 --- .../org/elasticsearch/threadpool/ThreadPoolTests.java | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java index 8a61054f5fa93..66d3dd7a829eb 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolTests.java @@ -465,7 +465,6 @@ public void testScheduledOneShotForceExecution() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106618") public void testScheduledFixedDelayRejection() { final var name = "fixed-bounded"; final var threadPool = new TestThreadPool( @@ -476,17 +475,14 @@ public void testScheduledFixedDelayRejection() { final var future = new PlainActionFuture(); final var latch = new CountDownLatch(1); try { + blockExecution(threadPool.executor(name), latch); threadPool.scheduleWithFixedDelay( - 
ActionRunnable.wrap(future, ignored -> Thread.yield()), + ActionRunnable.wrap(future, ignored -> fail("should not execute")), TimeValue.timeValueMillis(between(1, 100)), threadPool.executor(name) ); - while (future.isDone() == false) { - // might not block all threads the first time round if the scheduled runnable is running, so must keep trying - blockExecution(threadPool.executor(name), latch); - } - expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future)); + expectThrows(EsRejectedExecutionException.class, () -> FutureUtils.get(future, 10, TimeUnit.SECONDS)); } finally { latch.countDown(); assertTrue(terminate(threadPool)); From cc8fb4dba9536c33abf6a4f396657ba97e52518c Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 22 Mar 2024 09:42:29 +0100 Subject: [PATCH 117/214] ESQL: Re-enable logical dependency check (#105860) * Fix Enrich refs locally in the dependency check * Re-enable logical dependency check + test for it * Return server error on failed dependency check --- docs/changelog/105860.yaml | 5 +++++ .../xpack/esql/optimizer/LogicalVerifier.java | 9 ++++++--- .../xpack/esql/optimizer/OptimizerRules.java | 10 ++++++++++ .../esql/optimizer/LogicalPlanOptimizerTests.java | 3 +-- 4 files changed, 22 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/105860.yaml diff --git a/docs/changelog/105860.yaml b/docs/changelog/105860.yaml new file mode 100644 index 0000000000000..71f3544a02a1f --- /dev/null +++ b/docs/changelog/105860.yaml @@ -0,0 +1,5 @@ +pr: 105860 +summary: "ESQL: Re-enable logical dependency check" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java index bf569ee587dbc..6b62029bd8f45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalVerifier.java @@ -22,11 +22,10 @@ private LogicalVerifier() {} /** Verifies the optimized logical plan. */ public Failures verify(LogicalPlan plan) { Failures failures = new Failures(); + Failures dependencyFailures = new Failures(); plan.forEachUp(p -> { - // dependency check - // FIXME: re-enable - // DEPENDENCY_CHECK.checkPlan(p, failures); + DEPENDENCY_CHECK.checkPlan(p, dependencyFailures); if (failures.hasFailures() == false) { p.forEachExpression(ex -> { @@ -37,6 +36,10 @@ public Failures verify(LogicalPlan plan) { } }); + if (dependencyFailures.hasFailures()) { + throw new IllegalStateException(dependencyFailures.toString()); + } + return failures; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index 645924907b6f5..e375f11ab3ae7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -79,6 +79,16 @@ protected AttributeSet generates(P p) { } static class LogicalPlanDependencyCheck extends DependencyConsistency { + @Override + protected AttributeSet references(LogicalPlan plan) { + if (plan instanceof Enrich enrich) { + // The enrichFields are NamedExpressions, so we compute their references as well when just calling enrich.references(). + // But they are not actually referring to attributes from the input plan - only the match field does. 
+ return enrich.matchField().references(); + } + return super.references(plan); + } + @Override protected AttributeSet generates(LogicalPlan logicalPlan) { // source-like operators diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c6747c9d65d24..952fbc6f265e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -3299,7 +3299,6 @@ public void testEmptyMappingIndex() { assertThat(Expressions.names(local.output()), contains(NO_FIELDS.get(0).name(), "x", "language_code", "language_name")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105436") public void testPlanSanityCheck() throws Exception { var plan = optimizedPlan(""" from test @@ -3325,7 +3324,7 @@ public void testPlanSanityCheck() throws Exception { ) ); - VerificationException e = expectThrows(VerificationException.class, () -> logicalOptimizer.optimize(invalidPlan)); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> logicalOptimizer.optimize(invalidPlan)); assertThat(e.getMessage(), containsString("Plan [OrderBy[[Order[salary")); assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary")); } From b5923eb8dc42d11eb2d44ea8802008066ebc7344 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Mar 2024 08:56:15 +0000 Subject: [PATCH 118/214] AwaitsFix for #106650 --- .../test/ml/search_knn_query_vector_builder.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml index 97e5146e9af86..50f687f704994 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml @@ -106,8 +106,10 @@ setup: --- "Test vector search with query_vector_builder": - skip: - version: " - 8.13.99" - reason: "introduced after 8.13" + version: all + reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/106650 +# version: " - 8.13.99" +# reason: "introduced after 8.13" - do: search: index: index-with-embedded-text From 23278a52cf60e9d155b4423c8859379c7130d8bc Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Mar 2024 09:50:18 +0000 Subject: [PATCH 119/214] Expand conceptual docs around `ActionListener` (#105405) Rephrases the docs about `ActionListener` in terms of continuations and control flow, rather than as injecting logic into lower layers. --- docs/internal/DistributedArchitectureGuide.md | 72 ++++++++++++++++--- 1 file changed, 61 insertions(+), 11 deletions(-) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index ea5942ff71cc8..a89956721a481 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,20 +10,70 @@ ### ActionListener -`ActionListener`s are a means off injecting logic into lower layers of the code. They encapsulate a block of code that takes a response -value -- the `onResponse()` method --, and then that block of code (the `ActionListener`) is passed into a function that will eventually -execute the code (call `onResponse()`) when a response value is available. `ActionListener`s are used to pass code down to act on a result, -rather than lower layers returning a result back up to be acted upon by the caller. 
One of three things can happen to a listener: it can be -executed in the same thread — e.g. `ActionListener.run()` --; it can be passed off to another thread to be executed; or it can be added to -a list someplace, to eventually be executed by some service. `ActionListener`s also define `onFailure()` logic, in case an error is -encountered before a result can be formed. +Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which +doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. +They support several useful control flows: + +- They can be completed immediately on the calling thread. +- They can be completed concurrently on a different thread. +- They can be stored in a data structure and completed later on when the system reaches a particular state. +- Most commonly, they can be passed on to other methods that themselves require a callback. +- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run + before or after completion, before passing them on. + +`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is +used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose +parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes +it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is +certainly not easy in an absolute sense. 
Finally, it has allowed us to build a rich library for working with `ActionListener` instances +themselves, creating new instances out of existing ones and completing them in interesting ways. See for instance: + +- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself +- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere +- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel +- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows + +Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code +without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to +waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our +code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The +entry points for all Elasticsearch APIs are callback-based (e.g. 
REST APIs all start at +[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), +and transport APIs all start at +[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) +and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via +callbacks. + +`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the +sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function +which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual +style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two +ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an +`onResponse()` and an `onFailure()` method. + +CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also +enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, +perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before +proceeding (e.g. +[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) +amongst many others). 
Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the +programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all +the callbacks ourselves. + +Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that +asynchronous methods must have `void` return type and may not throw any exceptions. This is mostly the case in our code as written today, +and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In +particular, it's not uncommon to permit some methods to throw an exception, using things like +[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) +(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an +`ActionListener` parameter, but still return a value separately for other local synchronous work. This pattern is often used in the transport action layer with the use of the -[ChannelActionListener]([url](https://github.com/elastic/elasticsearch/blob/8.12/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java)) +[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so -a call taking a long time generally won't hog resources: it's cheap. 
A transport action can take hours to respond and that's alright, -barring caller timeouts. +channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a +call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring +caller timeouts. (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) From 4647691809b56892a8ab6b6e93e6a5dcfae09a3e Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Fri, 22 Mar 2024 11:00:39 +0100 Subject: [PATCH 120/214] Clarify the difference between ImmutableEntry and Map.entry (#106588) --- server/src/main/java/org/elasticsearch/common/util/Maps.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index 1b46e71dadd12..fc911793711b7 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -331,8 +331,7 @@ public static Map transformValues(Map source, Function /** * An immutable implementation of {@link Map.Entry}. - * @param key key - * @param value value + * Unlike {@code Map.entry(...)} this implementation permits null key and value. */ public record ImmutableEntry(KType key, VType value) implements Map.Entry { From 69ecdc643b8d7b7c9482a8feeee6380cae7abb8d Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Fri, 22 Mar 2024 11:26:45 +0100 Subject: [PATCH 121/214] Expose workflow restriction parsing (#106648) Need this one for custom role parsing upstream. 
--- .../elasticsearch/xpack/core/security/authz/RoleDescriptor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index e9aa982a05d8b..ecbd12a7f4643 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -1623,7 +1623,7 @@ public String toString() { return sb.toString(); } - static Restriction parse(String roleName, XContentParser parser) throws IOException { + public static Restriction parse(String roleName, XContentParser parser) throws IOException { // advance to the START_OBJECT token if needed XContentParser.Token token = parser.currentToken() == null ? parser.nextToken() : parser.currentToken(); if (token != XContentParser.Token.START_OBJECT) { From 14ca58c9260e3d9bca86d561d254e84ed9266ed7 Mon Sep 17 00:00:00 2001 From: Dmitry Cherniachenko <2sabio@gmail.com> Date: Fri, 22 Mar 2024 11:27:45 +0100 Subject: [PATCH 122/214] `Sets` utility class code cleanup (#105350) * `Sets` utility class code cleanup - newHashSetWithExpectedSize() returns HashSet, same as newHashSet() - haveEmptyIntersection() iterates a smaller set, same as intersection() - sortedDifference() accepts sets with Comparable values - replace Set.copyOf() in addToCopy() with Set.of() to avoid 1 extra copy of values --- .../elasticsearch/common/util/set/Sets.java | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java index adfa5023f2b35..75e5717d41b9f 100644 --- a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java @@ -40,7 +40,7 @@ public static HashSet newHashSet(T... elements) { return new HashSet<>(Arrays.asList(elements)); } - public static Set newHashSetWithExpectedSize(int expectedSize) { + public static HashSet newHashSetWithExpectedSize(int expectedSize) { return new HashSet<>(capacity(expectedSize)); } @@ -53,7 +53,17 @@ static int capacity(int expectedSize) { return expectedSize < 2 ? expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0); } - public static boolean haveEmptyIntersection(Set left, Set right) { + public static boolean haveEmptyIntersection(Set set1, Set set2) { + final Set left; + final Set right; + if (set1.size() < set2.size()) { + left = set1; + right = set2; + } else { + left = set2; + right = set1; + } + for (T t : left) { if (right.contains(t)) { return false; @@ -95,7 +105,7 @@ public static Set difference(Set left, Set right) { * @param the type of the elements of the sets * @return the sorted relative complement of the left set with respect to the right set */ - public static SortedSet sortedDifference(final Set left, final Set right) { + public static > SortedSet sortedDifference(final Set left, final Set right) { final SortedSet set = new TreeSet<>(); for (T k : left) { if (right.contains(k) == false) { @@ -165,11 +175,12 @@ public static Set intersection(Set set1, Set set2) { * * @param set set to copy * @param elements elements to add + * @return the unmodifiable copy of the input set with the extra elements added */ @SuppressWarnings("unchecked") public static Set addToCopy(Set set, E... 
elements) { final var res = new HashSet<>(set); Collections.addAll(res, elements); - return Set.copyOf(res); + return (Set) Set.of(res.toArray()); } } From d8fc8779c469e688336a23bd34f91ce8af9f417c Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 22 Mar 2024 12:06:27 +0100 Subject: [PATCH 123/214] Fix S3RepositoryThirdPartyTests.testReadFromPositionLargerThanBlobLength (#106466) The test should use a random operation purpose that is not "Indices", otherwise S3RetryingInputStream retries up to Integer.MAX_VALUE times which causes the test suite to timeout. Also fixes the progress in the retries log messages. Closes #106457 --- .../s3/S3RepositoryThirdPartyTests.java | 28 ++++++++++++++---- .../s3/S3RetryingInputStream.java | 29 +++++++++++++++---- 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index 583d1477fbaa9..085e357da5ae9 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.repositories.s3; -import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; @@ -26,6 +25,7 @@ import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.indices.recovery.RecoverySettings; @@ -44,12 +44,14 @@ import java.io.IOException; import java.util.Collection; import java.util.List; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.blankOrNullString; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -226,7 +228,6 @@ List listMultipartUploads() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106457") public void testReadFromPositionLargerThanBlobLength() { final var blobName = randomIdentifier(); final var blobBytes = randomBytesReference(randomIntBetween(100, 2_000)); @@ -239,9 +240,26 @@ public void testReadFromPositionLargerThanBlobLength() { long position = randomLongBetween(blobBytes.length(), Long.MAX_VALUE - 1L); long length = randomLongBetween(1L, Long.MAX_VALUE - position); - var exception = expectThrows(AmazonClientException.class, () -> readBlob(repository, blobName, position, length)); - assertThat(exception, instanceOf(AmazonS3Exception.class)); - assertThat(((AmazonS3Exception) exception).getStatusCode(), equalTo(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus())); + var exception = expectThrows(UncategorizedExecutionException.class, () -> readBlob(repository, blobName, position, length)); + assertThat(exception.getCause(), instanceOf(ExecutionException.class)); + assertThat(exception.getCause().getCause(), instanceOf(IOException.class)); + assertThat( + exception.getCause().getCause().getMessage(), + containsString( + "Requested range [start=" + + position + + ", end=" + + (position + length - 1L) + + ", currentOffset=0] cannot be satisfied for blob 
object [" + + repository.basePath().buildAsString() + + blobName + + ']' + ) + ); + assertThat( + asInstanceOf(AmazonS3Exception.class, exception.getRootCause()).getStatusCode(), + equalTo(RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) + ); } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 998455a658406..d08ff5eefd20f 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.core.IOUtils; import org.elasticsearch.repositories.s3.S3BlobStore.Operation; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.io.InputStream; @@ -94,16 +95,34 @@ private void openStreamWithRetry() throws IOException { : "requesting beyond end, start = " + start + " offset=" + currentOffset + " end=" + end; getObjectRequest.setRange(Math.addExact(start, currentOffset), end); } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); this.currentStreamFirstOffset = Math.addExact(start, currentOffset); + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); this.currentStreamLastOffset = Math.addExact(currentStreamFirstOffset, getStreamLength(s3Object)); this.currentStream = s3Object.getObjectContent(); return; } catch (AmazonClientException e) { - if (e instanceof AmazonS3Exception amazonS3Exception && 404 == amazonS3Exception.getStatusCode()) { - throw addSuppressedExceptions( - new NoSuchFileException("Blob object [" + blobKey + "] not found: " + amazonS3Exception.getMessage()) - ); + if (e instanceof AmazonS3Exception 
amazonS3Exception) { + if (amazonS3Exception.getStatusCode() == RestStatus.NOT_FOUND.getStatus()) { + throw addSuppressedExceptions( + new NoSuchFileException("Blob object [" + blobKey + "] not found: " + amazonS3Exception.getMessage()) + ); + } + if (amazonS3Exception.getStatusCode() == RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) { + throw addSuppressedExceptions( + new IOException( + "Requested range [start=" + + start + + ", end=" + + end + + ", currentOffset=" + + currentOffset + + "] cannot be satisfied for blob object [" + + blobKey + + ']', + amazonS3Exception + ) + ); + } } if (attempt == 1) { From a8188f8173caf07023aedc490d22fd9050cdab1e Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 22 Mar 2024 11:12:45 +0000 Subject: [PATCH 124/214] [ML] Fix Array out of bounds exception in the XLM Roberta tokenizer (#106655) Increases the buffer size for the normalised form of the input unicode character. Certain characters can have surprisingly long normalised forms --- docs/changelog/106655.yaml | 5 +++++ .../nlp/tokenizers/PrecompiledCharMapNormalizer.java | 5 +---- .../nlp/tokenizers/PrecompiledCharMapNormalizerTests.java | 5 +++++ 3 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/106655.yaml diff --git a/docs/changelog/106655.yaml b/docs/changelog/106655.yaml new file mode 100644 index 0000000000000..98078595d5f0c --- /dev/null +++ b/docs/changelog/106655.yaml @@ -0,0 +1,5 @@ +pr: 106655 +summary: Fix Array out of bounds exception in the XLM Roberta tokenizer +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 836c9a78f19d9..93dc8077196d7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -73,10 +73,8 @@ static Config fromBase64EncodedResource(String resourcePath) throws IOException private final int[] offsets; // The entire normalized bytes representations delimited by NULL private final byte[] normalizedStrUtf8Bytes; - // Continually reused to copy a single char into utf8 bytes - private final byte[] reusableCharByteBuffer = new byte[4]; // reusable char buffer for decoding utf8 bytes to determine char offset corrections - private final char[] reusableCharDecodeBuffer = new char[8]; + private final char[] reusableCharDecodeBuffer = new char[64]; private Reader transformedInput; public PrecompiledCharMapNormalizer(int[] offsets, String normalizedStr, Reader in) { @@ -172,7 +170,6 @@ Reader normalize(CharSequence str) { ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(str)); byte[] strBytes = new byte[byteBuffer.limit()]; byteBuffer.get(strBytes); - int[] strCp = str.codePoints().toArray(); BreakIterator b = BreakIterator.getCharacterInstance(Locale.ROOT); b.setText(str); // We iterate the whole string, so b.first() is always `0` diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index d542b97eee192..eef9902d35e59 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -57,6 +57,11 @@ public void testEmoji() throws IOException { assertNormalization("😀", parsed, "😀"); } + public void testCharThatNormalizesToLongText() throws IOException { + PrecompiledCharMapNormalizer.Config parsed = 
loadTestCharMap(); + assertNormalization("ﷺ", parsed, "صلى الله عليه وسلم"); + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), From c41df745a6e0c954ee8827e7bac7fd0b71e2bb52 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 22 Mar 2024 12:31:16 +0100 Subject: [PATCH 125/214] Move more XContent parsers that are only used in tests to test codebase (#105801) Just like a couple times before, moving a couple more of the test only parsers to the test codebase to save code-size etc. --- .../geoip/stats/GeoIpDownloaderStats.java | 34 +---- .../GeoIpDownloaderStatsSerializingTests.java | 17 ++- .../action/DocWriteResponse.java | 2 +- .../node/tasks/list/ListTasksResponse.java | 41 +---- .../verify/VerifyRepositoryResponse.java | 22 --- .../ClusterUpdateSettingsResponse.java | 25 +-- .../status/SnapshotsStatusResponse.java | 22 --- .../GetScriptContextResponse.java | 29 +--- .../analyze/ReloadAnalyzersResponse.java | 57 +------ .../admin/indices/open/OpenIndexResponse.java | 16 -- .../validate/query/ValidateQueryResponse.java | 41 ----- .../action/delete/DeleteResponse.java | 20 --- .../action/explain/ExplainResponse.java | 54 +------ .../action/index/IndexResponse.java | 20 --- .../support/master/AcknowledgedResponse.java | 2 +- .../master/ShardsAcknowledgedResponse.java | 2 +- .../support/tasks/BaseTasksResponse.java | 4 +- .../action/update/UpdateResponse.java | 31 +--- .../cluster/health/ClusterIndexHealth.java | 11 -- .../index/reindex/BulkByScrollTask.java | 138 ----------------- .../search/profile/SearchProfileResults.java | 81 +--------- .../verify/VerifyRepositoryResponseTests.java | 23 ++- .../ClusterUpdateSettingsResponseTests.java | 17 ++- .../status/SnapshotsStatusResponseTests.java | 19 ++- .../GetScriptContextResponseTests.java | 26 +++- 
.../analyze/ReloadAnalyzersResponseTests.java | 48 +++++- .../indices/open/OpenIndexResponseTests.java | 15 +- .../query/ValidateQueryResponseTests.java | 36 ++++- .../action/bulk/BulkItemResponseTests.java | 7 +- .../action/delete/DeleteResponseTests.java | 14 +- .../action/explain/ExplainResponseTests.java | 41 ++++- .../action/index/IndexResponseTests.java | 12 +- .../action/update/UpdateResponseTests.java | 29 +++- .../health/ClusterIndexHealthTests.java | 9 +- .../reindex/BulkByScrollResponseTests.java | 2 +- ...ulkByScrollTaskStatusOrExceptionTests.java | 2 +- .../reindex/BulkByScrollTaskStatusTests.java | 144 +++++++++++++++++- .../profile/SearchProfileResultsTests.java | 3 +- .../tasks/ListTasksResponseTests.java | 40 ++++- .../search/SearchResponseUtils.java | 78 +++++++++- .../core/ilm/ExplainLifecycleResponse.java | 25 --- .../action/PreviewTransformAction.java | 26 ---- .../ilm/ExplainLifecycleResponseTests.java | 22 ++- .../PreviewTransformsActionResponseTests.java | 27 +++- .../action/PutAnalyticsCollectionAction.java | 15 -- ...CollectionResponseBWCSerializingTests.java | 3 +- 46 files changed, 639 insertions(+), 713 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java index acfda99ae42fc..64b704a484058 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStats.java @@ -13,10 +13,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.ingest.geoip.GeoIpDownloader; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; 
import java.io.IOException; import java.util.Objects; @@ -25,26 +23,12 @@ public class GeoIpDownloaderStats implements Task.Status { public static final GeoIpDownloaderStats EMPTY = new GeoIpDownloaderStats(0, 0, 0, 0, 0, 0); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "geoip_downloader_stats", - a -> new GeoIpDownloaderStats((int) a[0], (int) a[1], (long) a[2], (int) a[3], (int) a[4], a[5] == null ? 0 : (int) a[5]) - ); - - private static final ParseField SUCCESSFUL_DOWNLOADS = new ParseField("successful_downloads"); - private static final ParseField FAILED_DOWNLOADS = new ParseField("failed_downloads"); - private static final ParseField TOTAL_DOWNLOAD_TIME = new ParseField("total_download_time"); - private static final ParseField DATABASES_COUNT = new ParseField("databases_count"); - private static final ParseField SKIPPED_DOWNLOADS = new ParseField("skipped_updates"); - private static final ParseField EXPIRED_DATABASES = new ParseField("expired_databases"); - - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), SUCCESSFUL_DOWNLOADS); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), FAILED_DOWNLOADS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_DOWNLOAD_TIME); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), DATABASES_COUNT); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), SKIPPED_DOWNLOADS); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), EXPIRED_DATABASES); - } + static final ParseField SUCCESSFUL_DOWNLOADS = new ParseField("successful_downloads"); + static final ParseField FAILED_DOWNLOADS = new ParseField("failed_downloads"); + static final ParseField TOTAL_DOWNLOAD_TIME = new ParseField("total_download_time"); + static final ParseField DATABASES_COUNT = new ParseField("databases_count"); + static final ParseField SKIPPED_DOWNLOADS = new ParseField("skipped_updates"); + static final ParseField 
EXPIRED_DATABASES = new ParseField("expired_databases"); private final int successfulDownloads; private final int failedDownloads; @@ -62,7 +46,7 @@ public GeoIpDownloaderStats(StreamInput in) throws IOException { expiredDatabases = in.readVInt(); } - private GeoIpDownloaderStats( + GeoIpDownloaderStats( int successfulDownloads, int failedDownloads, long totalDownloadTime, @@ -170,10 +154,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static GeoIpDownloaderStats fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(successfulDownloads); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java index 68b1ac4b28ff7..69e9cc9b5f5e5 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsSerializingTests.java @@ -10,15 +10,30 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; public class GeoIpDownloaderStatsSerializingTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "geoip_downloader_stats", + a -> new GeoIpDownloaderStats((int) a[0], (int) a[1], (long) a[2], (int) a[3], (int) a[4], a[5] == null ? 
0 : (int) a[5]) + ); + + static { + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.SUCCESSFUL_DOWNLOADS); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.FAILED_DOWNLOADS); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.TOTAL_DOWNLOAD_TIME); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.DATABASES_COUNT); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), GeoIpDownloaderStats.SKIPPED_DOWNLOADS); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), GeoIpDownloaderStats.EXPIRED_DATABASES); + } + @Override protected GeoIpDownloaderStats doParseInstance(XContentParser parser) throws IOException { - return GeoIpDownloaderStats.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 230a8154b64ce..fdef41acb16da 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -309,7 +309,7 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t * {@link DocWriteResponse} objects. It always parses the current token, updates the given parsing context accordingly * if needed and then immediately returns. 
*/ - protected static void parseInnerToXContent(XContentParser parser, Builder context) throws IOException { + public static void parseInnerToXContent(XContentParser parser, Builder context) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 7d883ad60b4e7..6d052c242c55c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -23,9 +22,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -35,13 +31,11 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Returns the list of tasks currently running on the nodes */ public class ListTasksResponse extends BaseTasksResponse { - private static final String TASKS = "tasks"; + public static final String TASKS = "tasks"; private final List tasks; @@ -69,35 +63,6 
@@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(tasks); } - protected static ConstructingObjectParser setupParser( - String name, - TriFunction, List, List, T> ctor - ) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(name, true, constructingObjects -> { - int i = 0; - @SuppressWarnings("unchecked") - List tasks = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List tasksFailures = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodeFailures = (List) constructingObjects[i]; - return ctor.apply(tasks, tasksFailures, nodeFailures); - }); - parser.declareObjectArray(optionalConstructorArg(), TaskInfo.PARSER, new ParseField(TASKS)); - parser.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), new ParseField(TASK_FAILURES)); - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - new ParseField(NODE_FAILURES) - ); - return parser; - } - - private static final ConstructingObjectParser PARSER = setupParser( - "list_tasks_response", - ListTasksResponse::new - ); - /** * Returns the list of tasks by node */ @@ -250,10 +215,6 @@ public ChunkedToXContentObject groupedByNone() { })); } - public static ListTasksResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return Strings.toString(ChunkedToXContent.wrapAsToXContent(groupedByNone()), true, true); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java index 354c67cfb416b..8d48141f9e268 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponse.java @@ -14,11 +14,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; @@ -34,12 +31,6 @@ public class VerifyRepositoryResponse extends ActionResponse implements ToXConte static final String NAME = "name"; public static class NodeView implements Writeable, ToXContentObject { - private static final ObjectParser.NamedObjectParser PARSER; - static { - ObjectParser internalParser = new ObjectParser<>(NODES, true, null); - internalParser.declareString(NodeView::setName, new ParseField(NAME)); - PARSER = (p, v, name) -> internalParser.parse(p, new NodeView(name), null); - } final String nodeId; String name; @@ -104,15 +95,6 @@ public int hashCode() { private List nodes; - private static final ObjectParser PARSER = new ObjectParser<>( - VerifyRepositoryResponse.class.getName(), - true, - VerifyRepositoryResponse::new - ); - static { - PARSER.declareNamedObjects(VerifyRepositoryResponse::setNodes, NodeView.PARSER, new ParseField("nodes")); - } - public VerifyRepositoryResponse() {} public VerifyRepositoryResponse(StreamInput in) throws IOException { @@ -157,10 +139,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static VerifyRepositoryResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return Strings.toString(this); diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java index a4ec5222e2392..d99cc0b0ef8df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponse.java @@ -12,36 +12,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * A response for a cluster update settings action. 
*/ public class ClusterUpdateSettingsResponse extends AcknowledgedResponse { - private static final ParseField PERSISTENT = new ParseField("persistent"); - private static final ParseField TRANSIENT = new ParseField("transient"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_update_settings_response", - true, - args -> { - return new ClusterUpdateSettingsResponse((boolean) args[0], (Settings) args[1], (Settings) args[2]); - } - ); - static { - declareAcknowledgedField(PARSER); - PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), TRANSIENT); - PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), PERSISTENT); - } + static final ParseField PERSISTENT = new ParseField("persistent"); + static final ParseField TRANSIENT = new ParseField("transient"); final Settings transientSettings; final Settings persistentSettings; @@ -83,10 +66,6 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IO builder.endObject(); } - public static ClusterUpdateSettingsResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (super.equals(o)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java index 578800edfb691..941f1c8d30b2c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java @@ -13,18 +13,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.xcontent.ConstructingObjectParser; 
-import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Snapshot status response */ @@ -55,23 +50,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(snapshots); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshots_status_response", - true, - (Object[] parsedObjects) -> { - @SuppressWarnings("unchecked") - List snapshots = (List) parsedObjects[0]; - return new SnapshotsStatusResponse(snapshots); - } - ); - static { - PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots")); - } - - public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java index e7568a0c66a37..73cfeb48b96bc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponse.java @@ -13,11 +13,9 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.script.ScriptContextInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; @@ -31,28 +29,9 @@ public class GetScriptContextResponse extends ActionResponse implements ToXContentObject { - private static final ParseField CONTEXTS = new ParseField("contexts"); + static final ParseField CONTEXTS = new ParseField("contexts"); final Map contexts; - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_script_context", - true, - (a) -> { - Map contexts = ((List) a[0]).stream() - .collect(Collectors.toMap(ScriptContextInfo::getName, c -> c)); - return new GetScriptContextResponse(contexts); - } - ); - - static { - PARSER.declareObjectArray( - ConstructingObjectParser.constructorArg(), - (parser, ctx) -> ScriptContextInfo.PARSER.apply(parser, ctx), - CONTEXTS - ); - } - GetScriptContextResponse(StreamInput in) throws IOException { super(in); int size = in.readInt(); @@ -70,7 +49,7 @@ public class GetScriptContextResponse extends ActionResponse implements ToXConte } // Parser constructor - private GetScriptContextResponse(Map contexts) { + GetScriptContextResponse(Map contexts) { this.contexts = Map.copyOf(contexts); } @@ -96,10 +75,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static GetScriptContextResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java index 4f388804f2340..98b11de5bffc9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponse.java @@ -9,19 +9,14 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -29,8 +24,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * The response object that will be returned when reloading analyzers */ @@ -38,10 +31,10 @@ public class ReloadAnalyzersResponse extends BroadcastResponse { private final Map reloadDetails; - private static final ParseField RELOAD_DETAILS_FIELD = new ParseField("reload_details"); - private static final ParseField INDEX_FIELD = new ParseField("index"); - private static final ParseField RELOADED_ANALYZERS_FIELD = new ParseField("reloaded_analyzers"); - private static final ParseField RELOADED_NODE_IDS_FIELD = new ParseField("reloaded_node_ids"); + static final ParseField RELOAD_DETAILS_FIELD = new ParseField("reload_details"); + static final ParseField INDEX_FIELD = new ParseField("index"); + static final ParseField RELOADED_ANALYZERS_FIELD = new ParseField("reloaded_analyzers"); + static final ParseField RELOADED_NODE_IDS_FIELD = new ParseField("reloaded_node_ids"); public ReloadAnalyzersResponse(StreamInput in) 
throws IOException { super(in); @@ -80,48 +73,6 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.endArray(); } - @SuppressWarnings({ "unchecked" }) - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "reload_analyzer", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - List results = (List) arg[1]; - Map reloadedNodeIds = new HashMap<>(); - for (ReloadDetails result : results) { - reloadedNodeIds.put(result.getIndexName(), result); - } - return new ReloadAnalyzersResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()), - reloadedNodeIds - ); - } - ); - - @SuppressWarnings({ "unchecked" }) - private static final ConstructingObjectParser ENTRY_PARSER = new ConstructingObjectParser<>( - "reload_analyzer.entry", - true, - arg -> { - return new ReloadDetails((String) arg[0], new HashSet<>((List) arg[1]), new HashSet<>((List) arg[2])); - } - ); - - static { - declareBroadcastFields(PARSER); - PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, RELOAD_DETAILS_FIELD); - ENTRY_PARSER.declareString(constructorArg(), INDEX_FIELD); - ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_ANALYZERS_FIELD); - ENTRY_PARSER.declareStringArray(constructorArg(), RELOADED_NODE_IDS_FIELD); - } - - public static ReloadAnalyzersResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java index 39d16e35e61a2..605aab5ab02d9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponse.java @@ -11,8 +11,6 @@ import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -21,16 +19,6 @@ */ public class OpenIndexResponse extends ShardsAcknowledgedResponse { - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_index", - true, - args -> new OpenIndexResponse((boolean) args[0], (boolean) args[1]) - ); - - static { - declareAcknowledgedAndShardsAcknowledgedFields(PARSER); - } - public OpenIndexResponse(StreamInput in) throws IOException { super(in, true); } @@ -44,8 +32,4 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); writeShardsAcknowledged(out); } - - public static OpenIndexResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 6ec0be33e3766..aaa06908f72f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -9,23 +9,14 @@ package org.elasticsearch.action.admin.indices.validate.query; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * The response of the validate action. * @@ -36,38 +27,10 @@ public class ValidateQueryResponse extends BroadcastResponse { public static final String VALID_FIELD = "valid"; public static final String EXPLANATIONS_FIELD = "explanations"; - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "validate_query", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ValidateQueryResponse( - (boolean) arg[1], - (List) arg[2], - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - static { - declareBroadcastFields(PARSER); - PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); - PARSER.declareObjectArray(optionalConstructorArg(), QueryExplanation.PARSER, new ParseField(EXPLANATIONS_FIELD)); - } - private final boolean valid; private final List queryExplanations; - ValidateQueryResponse(StreamInput in) throws IOException { - super(in); - valid = in.readBoolean(); - queryExplanations = in.readCollectionAsList(QueryExplanation::new); - } - ValidateQueryResponse( boolean valid, List queryExplanations, @@ -115,8 +78,4 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.endArray(); } } - - public static ValidateQueryResponse fromXContent(XContentParser parser) { - return 
PARSER.apply(parser, null); - } } diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java index 9fd9c5fcd791f..47202998d3193 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java @@ -12,12 +12,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * The response of the delete action. * @@ -64,23 +61,6 @@ public String toString() { return builder.append("]").toString(); } - public static DeleteResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. - */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - DocWriteResponse.parseInnerToXContent(parser, context); - } - /** * Builder class for {@link DeleteResponse}. 
This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link DocWriteResponse.Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index 58c50df47c3ce..9d8ba5f126fd5 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -17,14 +17,11 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Collection; import java.util.Objects; import static org.elasticsearch.common.lucene.Lucene.readExplanation; @@ -35,14 +32,14 @@ */ public class ExplainResponse extends ActionResponse implements ToXContentObject { - private static final ParseField _INDEX = new ParseField("_index"); - private static final ParseField _ID = new ParseField("_id"); + static final ParseField _INDEX = new ParseField("_index"); + static final ParseField _ID = new ParseField("_id"); private static final ParseField MATCHED = new ParseField("matched"); - private static final ParseField EXPLANATION = new ParseField("explanation"); - private static final ParseField VALUE = new ParseField("value"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField DETAILS = new ParseField("details"); - private static final ParseField GET = new ParseField("get"); + static final ParseField EXPLANATION = new ParseField("explanation"); + static final ParseField VALUE = new 
ParseField("value"); + static final ParseField DESCRIPTION = new ParseField("description"); + static final ParseField DETAILS = new ParseField("details"); + static final ParseField GET = new ParseField("get"); private final String index; private final String id; @@ -136,43 +133,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "explain", - true, - (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); - PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); - final ConstructingObjectParser explanationParser = getExplanationsParser(); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, EXPLANATION); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> GetResult.fromXContentEmbedded(p), GET); - } - - @SuppressWarnings("unchecked") - private static ConstructingObjectParser getExplanationsParser() { - final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>( - "explanation", - true, - arg -> { - if ((float) arg[0] > 0) { - return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); - } else { - return Explanation.noMatch((String) arg[1], (Collection) arg[2]); - } - } - ); - explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), VALUE); - explanationParser.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, DETAILS); - return explanationParser; - } - - public static ExplainResponse fromXContent(XContentParser parser, boolean exists) { - return PARSER.apply(parser, exists); - } - @Override public XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java index e3598c33d5951..c547eb7ba8f30 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexResponse.java @@ -17,13 +17,10 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.List; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A response of an index operation, * @@ -134,23 +131,6 @@ public String toString() { return builder.append("]").toString(); } - public static IndexResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. - */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - DocWriteResponse.parseInnerToXContent(parser, context); - } - /** * Builder class for {@link IndexResponse}. 
This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java index 107bab447e03e..892db8d4a6d04 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java @@ -40,7 +40,7 @@ public class AcknowledgedResponse extends ActionResponse implements IsAcknowledg public static final String ACKNOWLEDGED_KEY = "acknowledged"; private static final ParseField ACKNOWLEDGED = new ParseField(ACKNOWLEDGED_KEY); - protected static void declareAcknowledgedField(ConstructingObjectParser objectParser) { + public static void declareAcknowledgedField(ConstructingObjectParser objectParser) { objectParser.declareField( constructorArg(), (parser, context) -> parser.booleanValue(), diff --git a/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java index f897d8644d4bb..a00495605dbb5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/ShardsAcknowledgedResponse.java @@ -24,7 +24,7 @@ public class ShardsAcknowledgedResponse extends AcknowledgedResponse { protected static final ParseField SHARDS_ACKNOWLEDGED = new ParseField("shards_acknowledged"); - protected static void declareAcknowledgedAndShardsAcknowledgedFields( + public static void declareAcknowledgedAndShardsAcknowledgedFields( ConstructingObjectParser objectParser ) { declareAcknowledgedField(objectParser); diff --git 
a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index fe6db0c0c4a4b..3e8290ad4fb4a 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -32,8 +32,8 @@ * Base class for responses of task-related operations */ public class BaseTasksResponse extends ActionResponse { - protected static final String TASK_FAILURES = "task_failures"; - protected static final String NODE_FAILURES = "node_failures"; + public static final String TASK_FAILURES = "task_failures"; + public static final String NODE_FAILURES = "node_failures"; private List taskFailures; private List nodeFailures; diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index c6454dd6cedd8..8c9ae3f43d5c4 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -15,15 +15,12 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - public class UpdateResponse extends DocWriteResponse { - private static final String GET = "get"; + static final String GET = "get"; private GetResult getResult; @@ -114,32 +111,6 @@ public String toString() { return builder.append("]").toString(); } - public static UpdateResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - Builder context = new Builder(); - while 
(parser.nextToken() != XContentParser.Token.END_OBJECT) { - parseXContentFields(parser, context); - } - return context.build(); - } - - /** - * Parse the current token and update the parsing context appropriately. - */ - public static void parseXContentFields(XContentParser parser, Builder context) throws IOException { - XContentParser.Token token = parser.currentToken(); - String currentFieldName = parser.currentName(); - - if (GET.equals(currentFieldName)) { - if (token == XContentParser.Token.START_OBJECT) { - context.setGetResult(GetResult.fromXContentEmbedded(parser)); - } - } else { - DocWriteResponse.parseInnerToXContent(parser, context); - } - } - /** * Builder class for {@link UpdateResponse}. This builder is usually used during xcontent parsing to * temporarily store the parsed values, then the {@link DocWriteResponse.Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java index 887a0d7d5a751..f236a9eff25a2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java @@ -31,7 +31,6 @@ import java.util.Objects; import static java.util.Collections.emptyMap; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -284,16 +283,6 @@ public static ClusterIndexHealth innerFromXContent(XContentParser parser, String return PARSER.apply(parser, index); } - public static ClusterIndexHealth fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - XContentParser.Token token = parser.nextToken(); - 
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String index = parser.currentName(); - ClusterIndexHealth parsed = innerFromXContent(parser, index); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return parsed; - } - @Override public String toString() { return "ClusterIndexHealth{" diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 150948b4e5822..a483dd5cd48e9 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -20,14 +20,9 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; @@ -42,9 +37,7 @@ import static java.lang.Math.min; import static java.util.Collections.emptyList; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.core.TimeValue.timeValueNanos; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * Task storing information about a currently running BulkByScroll request. 
@@ -380,37 +373,6 @@ public static class Status implements Task.Status, SuccessfullyProcessed { FIELDS_SET.add(SLICES_FIELD); } - static final ConstructingObjectParser, Void> RETRIES_PARSER = new ConstructingObjectParser<>( - "bulk_by_scroll_task_status_retries", - true, - a -> new Tuple<>(((Long) a[0]), (Long) a[1]) - ); - static { - RETRIES_PARSER.declareLong(constructorArg(), new ParseField(RETRIES_BULK_FIELD)); - RETRIES_PARSER.declareLong(constructorArg(), new ParseField(RETRIES_SEARCH_FIELD)); - } - - public static void declareFields(ObjectParser parser) { - parser.declareInt(StatusBuilder::setSliceId, new ParseField(SLICE_ID_FIELD)); - parser.declareLong(StatusBuilder::setTotal, new ParseField(TOTAL_FIELD)); - parser.declareLong(StatusBuilder::setUpdated, new ParseField(UPDATED_FIELD)); - parser.declareLong(StatusBuilder::setCreated, new ParseField(CREATED_FIELD)); - parser.declareLong(StatusBuilder::setDeleted, new ParseField(DELETED_FIELD)); - parser.declareInt(StatusBuilder::setBatches, new ParseField(BATCHES_FIELD)); - parser.declareLong(StatusBuilder::setVersionConflicts, new ParseField(VERSION_CONFLICTS_FIELD)); - parser.declareLong(StatusBuilder::setNoops, new ParseField(NOOPS_FIELD)); - parser.declareObject(StatusBuilder::setRetries, RETRIES_PARSER, new ParseField(RETRIES_FIELD)); - parser.declareLong(StatusBuilder::setThrottled, new ParseField(THROTTLED_RAW_FIELD)); - parser.declareFloat(StatusBuilder::setRequestsPerSecond, new ParseField(REQUESTS_PER_SEC_FIELD)); - parser.declareString(StatusBuilder::setReasonCancelled, new ParseField(CANCELED_FIELD)); - parser.declareLong(StatusBuilder::setThrottledUntil, new ParseField(THROTTLED_UNTIL_RAW_FIELD)); - parser.declareObjectArray( - StatusBuilder::setSliceStatuses, - (p, c) -> StatusOrException.fromXContent(p), - new ParseField(SLICES_FIELD) - ); - } - private final Integer sliceId; private final long total; private final long updated; @@ -571,11 +533,6 @@ public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws return builder.endObject(); } - /** - * We need to write a manual parser for this because of {@link StatusOrException}. Since - * {@link StatusOrException#fromXContent(XContentParser)} tries to peek at a field first before deciding - * what needs to be it cannot use an {@link ObjectParser}. - */ public XContentBuilder innerXContent(XContentBuilder builder, Params params) throws IOException { if (sliceId != null) { builder.field(SLICE_ID_FIELD, sliceId); @@ -617,61 +574,6 @@ public XContentBuilder innerXContent(XContentBuilder builder, Params params) thr return builder; } - public static Status fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token; - if (parser.currentToken() == Token.START_OBJECT) { - token = parser.nextToken(); - } else { - token = parser.nextToken(); - } - ensureExpectedToken(Token.START_OBJECT, token, parser); - token = parser.nextToken(); - ensureExpectedToken(Token.FIELD_NAME, token, parser); - return innerFromXContent(parser); - } - - public static Status innerFromXContent(XContentParser parser) throws IOException { - Token token = parser.currentToken(); - String fieldName = parser.currentName(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - StatusBuilder builder = new StatusBuilder(); - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token == Token.START_OBJECT) { - if (fieldName.equals(Status.RETRIES_FIELD)) { - builder.setRetries(Status.RETRIES_PARSER.parse(parser, null)); - } else { - parser.skipChildren(); - } - } else if (token == Token.START_ARRAY) { - if (fieldName.equals(Status.SLICES_FIELD)) { - while ((token = parser.nextToken()) != Token.END_ARRAY) { - builder.addToSliceStatuses(StatusOrException.fromXContent(parser)); - } - } else { - parser.skipChildren(); - } - } else { // else if it is a value - switch (fieldName) { - 
case Status.SLICE_ID_FIELD -> builder.setSliceId(parser.intValue()); - case Status.TOTAL_FIELD -> builder.setTotal(parser.longValue()); - case Status.UPDATED_FIELD -> builder.setUpdated(parser.longValue()); - case Status.CREATED_FIELD -> builder.setCreated(parser.longValue()); - case Status.DELETED_FIELD -> builder.setDeleted(parser.longValue()); - case Status.BATCHES_FIELD -> builder.setBatches(parser.intValue()); - case Status.VERSION_CONFLICTS_FIELD -> builder.setVersionConflicts(parser.longValue()); - case Status.NOOPS_FIELD -> builder.setNoops(parser.longValue()); - case Status.THROTTLED_RAW_FIELD -> builder.setThrottled(parser.longValue()); - case Status.REQUESTS_PER_SEC_FIELD -> builder.setRequestsPerSecond(parser.floatValue()); - case Status.CANCELED_FIELD -> builder.setReasonCancelled(parser.text()); - case Status.THROTTLED_UNTIL_RAW_FIELD -> builder.setThrottledUntil(parser.longValue()); - } - } - } - return builder.buildStatus(); - } - @Override public String toString() { StringBuilder builder = new StringBuilder(); @@ -937,46 +839,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Since {@link StatusOrException} can contain either an {@link Exception} or a {@link Status} we need to peek - * at a field first before deciding what needs to be parsed since the same object could contains either. - * The {@link #EXPECTED_EXCEPTION_FIELDS} contains the fields that are expected when the serialised object - * was an instance of exception and the {@link Status#FIELDS_SET} is the set of fields expected when the - * serialized object was an instance of Status. 
- */ - public static StatusOrException fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - if (token == null) { - token = parser.nextToken(); - } - if (token == Token.VALUE_NULL) { - return null; - } else { - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - token = parser.nextToken(); - // This loop is present only to ignore unknown tokens. It breaks as soon as we find a field - // that is allowed. - while (token != Token.END_OBJECT) { - ensureExpectedToken(Token.FIELD_NAME, token, parser); - String fieldName = parser.currentName(); - // weird way to ignore unknown tokens - if (Status.FIELDS_SET.contains(fieldName)) { - return new StatusOrException(Status.innerFromXContent(parser)); - } else if (EXPECTED_EXCEPTION_FIELDS.contains(fieldName)) { - return new StatusOrException(ElasticsearchException.innerFromXContent(parser, false)); - } else { - // Ignore unknown tokens - token = parser.nextToken(); - if (token == Token.START_OBJECT || token == Token.START_ARRAY) { - parser.skipChildren(); - } - token = parser.nextToken(); - } - } - throw new XContentParseException("Unable to parse StatusFromException. 
Expected fields not found."); - } - } - @Override public String toString() { if (exception != null) { diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java index 1cc6810f8e575..cb15e9af8956a 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -15,36 +15,28 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; -import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Profile results for all shards. 
*/ public final class SearchProfileResults implements Writeable, ToXContentFragment { private static final Logger logger = LogManager.getLogger(SearchProfileResults.class); - private static final String ID_FIELD = "id"; + public static final String ID_FIELD = "id"; private static final String NODE_ID_FIELD = "node_id"; private static final String CLUSTER_FIELD = "cluster"; private static final String INDEX_NAME_FIELD = "index"; private static final String SHARD_ID_FIELD = "shard_id"; - private static final String SHARDS_FIELD = "shards"; + public static final String SHARDS_FIELD = "shards"; public static final String PROFILE_FIELD = "profile"; // map key is the composite "id" of form [nodeId][(clusterName:)indexName][shardId] created from SearchShardTarget.toString @@ -117,75 +109,6 @@ public String toString() { return Strings.toString(this); } - public static SearchProfileResults fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - Map profileResults = new HashMap<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.START_ARRAY) { - if (SHARDS_FIELD.equals(parser.currentName())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parseProfileResultsEntry(parser, profileResults); - } - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - parser.skipChildren(); - } - } - return new SearchProfileResults(profileResults); - } - - private static void parseProfileResultsEntry(XContentParser parser, Map searchProfileResults) - throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = null; - List queryProfileResults = new ArrayList<>(); - 
AggregationProfileShardResult aggProfileShardResult = null; - ProfileResult fetchResult = null; - String id = null; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (ID_FIELD.equals(currentFieldName)) { - id = parser.text(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if ("searches".equals(currentFieldName)) { - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); - } - } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { - aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("dfs".equals(currentFieldName)) { - searchProfileDfsPhaseResult = SearchProfileDfsPhaseResult.fromXContent(parser); - } else if ("fetch".equals(currentFieldName)) { - fetchResult = ProfileResult.fromXContent(parser); - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - SearchProfileShardResult result = new SearchProfileShardResult( - new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), - fetchResult - ); - result.getQueryPhase().setSearchProfileDfsPhaseResult(searchProfileDfsPhaseResult); - searchProfileResults.put(id, result); - } - /** * Parsed representation of a composite id used for shards in a profile. * The composite id format is specified/created via the {@code SearchShardTarget} method. 
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java index 8f5712d90487f..bf90d962912c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryResponseTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.action.admin.cluster.repositories.verify; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.util.ArrayList; @@ -15,9 +17,28 @@ public class VerifyRepositoryResponseTests extends AbstractXContentTestCase { + private static final ObjectParser PARSER = new ObjectParser<>( + VerifyRepositoryResponse.class.getName(), + true, + VerifyRepositoryResponse::new + ); + static { + ObjectParser internalParser = new ObjectParser<>( + VerifyRepositoryResponse.NODES, + true, + null + ); + internalParser.declareString(VerifyRepositoryResponse.NodeView::setName, new ParseField(VerifyRepositoryResponse.NAME)); + PARSER.declareNamedObjects( + VerifyRepositoryResponse::setNodes, + (p, v, name) -> internalParser.parse(p, new VerifyRepositoryResponse.NodeView(name), null), + new ParseField("nodes") + ); + } + @Override protected VerifyRepositoryResponse doParseInstance(XContentParser parser) { - return VerifyRepositoryResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java index 3f5692c30cfef..3d46994faacf7 100644 
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsResponseTests.java @@ -14,17 +14,32 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.util.List; import java.util.Set; import java.util.function.Predicate; +import static org.elasticsearch.action.support.master.AcknowledgedResponse.declareAcknowledgedField; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ClusterUpdateSettingsResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_update_settings_response", + true, + args -> new ClusterUpdateSettingsResponse((boolean) args[0], (Settings) args[1], (Settings) args[2]) + ); + static { + declareAcknowledgedField(PARSER); + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), ClusterUpdateSettingsResponse.TRANSIENT); + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), ClusterUpdateSettingsResponse.PERSISTENT); + } + @Override protected ClusterUpdateSettingsResponse doParseInstance(XContentParser parser) { - return ClusterUpdateSettingsResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java index f90b37f75fa41..21cba892669d0 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -17,11 +19,26 @@ import java.util.List; import java.util.function.Predicate; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SnapshotsStatusResponseTests extends AbstractChunkedSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshots_status_response", + true, + (Object[] parsedObjects) -> { + @SuppressWarnings("unchecked") + List snapshots = (List) parsedObjects[0]; + return new SnapshotsStatusResponse(snapshots); + } + ); + static { + PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots")); + } + @Override protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException { - return SnapshotsStatusResponse.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java index efb1e61e19fa2..41faaf3517e76 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptContextResponseTests.java @@ -8,14 +8,38 @@ package 
org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.script.ScriptContextInfo; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; public class GetScriptContextResponseTests extends AbstractXContentSerializingTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_script_context", + true, + (a) -> { + Map contexts = ((List) a[0]).stream() + .collect(Collectors.toMap(ScriptContextInfo::getName, c -> c)); + return new GetScriptContextResponse(contexts); + } + ); + + static { + PARSER.declareObjectArray( + ConstructingObjectParser.constructorArg(), + ScriptContextInfo.PARSER::apply, + GetScriptContextResponse.CONTEXTS + ); + } + @Override protected GetScriptContextResponse createTestInstance() { if (randomBoolean()) { @@ -31,7 +55,7 @@ protected Writeable.Reader instanceReader() { @Override protected GetScriptContextResponse doParseInstance(XContentParser parser) throws IOException { - return GetScriptContextResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java index a6524932dd775..f0802e471fc38 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java @@ -8,10 +8,12 @@ package org.elasticsearch.action.admin.indices.analyze; import 
org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -23,8 +25,52 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.action.support.broadcast.BaseBroadcastResponse.declareBroadcastFields; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "reload_analyzer", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + List results = (List) arg[1]; + Map reloadedNodeIds = new HashMap<>(); + for (ReloadAnalyzersResponse.ReloadDetails result : results) { + reloadedNodeIds.put(result.getIndexName(), result); + } + return new ReloadAnalyzersResponse( + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()), + reloadedNodeIds + ); + } + ); + + @SuppressWarnings({ "unchecked" }) + private static final ConstructingObjectParser ENTRY_PARSER = + new ConstructingObjectParser<>( + "reload_analyzer.entry", + true, + arg -> new ReloadAnalyzersResponse.ReloadDetails( + (String) arg[0], + new HashSet<>((List) arg[1]), + new HashSet<>((List) arg[2]) + ) + ); + + static { + declareBroadcastFields(PARSER); + PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, ReloadAnalyzersResponse.RELOAD_DETAILS_FIELD); + ENTRY_PARSER.declareString(constructorArg(), 
ReloadAnalyzersResponse.INDEX_FIELD); + ENTRY_PARSER.declareStringArray(constructorArg(), ReloadAnalyzersResponse.RELOADED_ANALYZERS_FIELD); + ENTRY_PARSER.declareStringArray(constructorArg(), ReloadAnalyzersResponse.RELOADED_NODE_IDS_FIELD); + } + @Override protected ReloadAnalyzersResponse createTestInstance( int totalShards, @@ -50,7 +96,7 @@ public static Map createRandomRel @Override protected ReloadAnalyzersResponse doParseInstance(XContentParser parser) throws IOException { - return ReloadAnalyzersResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java index 962304ef8aadc..a8d8980e6358c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/open/OpenIndexResponseTests.java @@ -10,13 +10,26 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; +import static org.elasticsearch.action.support.master.ShardsAcknowledgedResponse.declareAcknowledgedAndShardsAcknowledgedFields; + public class OpenIndexResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "open_index", + true, + args -> new OpenIndexResponse((boolean) args[0], (boolean) args[1]) + ); + + static { + declareAcknowledgedAndShardsAcknowledgedFields(PARSER); + } + @Override protected OpenIndexResponse doParseInstance(XContentParser parser) { - return OpenIndexResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java index 6bbb2884f1bf3..9ec910e79918c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -10,19 +10,53 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.test.AbstractBroadcastResponseTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; +import static org.elasticsearch.action.support.broadcast.BaseBroadcastResponse.declareBroadcastFields; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class ValidateQueryResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "validate_query", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new ValidateQueryResponse( + (boolean) arg[1], + (List) arg[2], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()) + ); + } + ); + static { + 
declareBroadcastFields(PARSER); + PARSER.declareBoolean(constructorArg(), new ParseField(ValidateQueryResponse.VALID_FIELD)); + PARSER.declareObjectArray( + optionalConstructorArg(), + QueryExplanation.PARSER, + new ParseField(ValidateQueryResponse.EXPLANATIONS_FIELD) + ); + } + private static ValidateQueryResponse createRandomValidateQueryResponse( int totalShards, int successfulShards, @@ -60,7 +94,7 @@ private static ValidateQueryResponse createRandomValidateQueryResponse() { @Override protected ValidateQueryResponse doParseInstance(XContentParser parser) throws IOException { - return ValidateQueryResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 76b1fa0011540..6c45367baf674 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -192,17 +192,16 @@ public static BulkItemResponse itemResponseFromXContent(XContentParser parser, i if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); builder = indexResponseBuilder; - itemParser = (indexParser) -> IndexResponse.parseXContentFields(indexParser, indexResponseBuilder); - + itemParser = indexParser -> DocWriteResponse.parseInnerToXContent(indexParser, indexResponseBuilder); } else if (opType == DocWriteRequest.OpType.UPDATE) { final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); builder = updateResponseBuilder; - itemParser = (updateParser) -> UpdateResponse.parseXContentFields(updateParser, updateResponseBuilder); + itemParser = updateParser -> UpdateResponseTests.parseXContentFields(updateParser, updateResponseBuilder); } else if (opType == 
DocWriteRequest.OpType.DELETE) { final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); builder = deleteResponseBuilder; - itemParser = (deleteParser) -> DeleteResponse.parseXContentFields(deleteParser, deleteResponseBuilder); + itemParser = deleteParser -> DocWriteResponse.parseInnerToXContent(deleteParser, deleteResponseBuilder); } else { throwUnknownField(currentFieldName, parser); } diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java index e7019a583b729..937ac2d26ebb9 100644 --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.delete; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -26,6 +27,7 @@ import static org.elasticsearch.action.index.IndexResponseTests.assertDocWriteResponse; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class DeleteResponseTests extends ESTestCase { @@ -102,7 +104,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } DeleteResponse parsedDeleteResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedDeleteResponse = DeleteResponse.fromXContent(parser); + parsedDeleteResponse = parseInstance(parser); assertNull(parser.nextToken()); } @@ -112,6 +114,16 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertDocWriteResponse(expectedDeleteResponse, 
parsedDeleteResponse); } + private static DeleteResponse parseInstance(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + + DeleteResponse.Builder context = new DeleteResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + DocWriteResponse.parseInnerToXContent(parser, context); + } + return context.build(); + } + /** * Returns a tuple of {@link DeleteResponse}s. *

    diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index 31fcfe342eb3a..2830d9408e494 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.index.get.GetResult; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.test.RandomObjects; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -24,6 +25,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Predicate; @@ -34,9 +36,46 @@ public class ExplainResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "explain", + true, + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], exists, (Explanation) arg[2], (GetResult) arg[3]) + ); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse._INDEX); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse._ID); + final ConstructingObjectParser explanationParser = getExplanationsParser(); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, ExplainResponse.EXPLANATION); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> GetResult.fromXContentEmbedded(p), + ExplainResponse.GET + ); + } + + @SuppressWarnings("unchecked") + private static ConstructingObjectParser getExplanationsParser() { + final ConstructingObjectParser 
explanationParser = new ConstructingObjectParser<>( + "explanation", + true, + arg -> { + if ((float) arg[0] > 0) { + return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); + } else { + return Explanation.noMatch((String) arg[1], (Collection) arg[2]); + } + } + ); + explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), ExplainResponse.VALUE); + explanationParser.declareString(ConstructingObjectParser.constructorArg(), ExplainResponse.DESCRIPTION); + explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, ExplainResponse.DETAILS); + return explanationParser; + } + @Override protected ExplainResponse doParseInstance(XContentParser parser) throws IOException { - return ExplainResponse.fromXContent(parser, randomBoolean()); + return PARSER.apply(parser, randomBoolean()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java index ea9e83021e781..c8a8c3853601d 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java @@ -29,6 +29,7 @@ import static org.elasticsearch.action.support.replication.ReplicationResponseTests.assertShardInfo; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class IndexResponseTests extends ESTestCase { @@ -111,7 +112,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } IndexResponse parsedIndexResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedIndexResponse = IndexResponse.fromXContent(parser); + parsedIndexResponse = 
parseInstanceFromXContent(parser); assertNull(parser.nextToken()); } @@ -121,6 +122,15 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertDocWriteResponse(expectedIndexResponse, parsedIndexResponse); } + private static IndexResponse parseInstanceFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + IndexResponse.Builder context = new IndexResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + DocWriteResponse.parseInnerToXContent(parser, context); + } + return context.build(); + } + public static void assertDocWriteResponse(DocWriteResponse expected, DocWriteResponse actual) { assertEquals(expected.getIndex(), actual.getIndex()); assertEquals(expected.getId(), actual.getId()); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java index 05c974ea9d4d3..d35162287e3ac 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java @@ -38,6 +38,7 @@ import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -174,7 +175,7 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws } UpdateResponse parsedUpdateResponse; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsedUpdateResponse = 
UpdateResponse.fromXContent(parser); + parsedUpdateResponse = parseInstanceFromXContent(parser); assertNull(parser.nextToken()); } @@ -191,6 +192,32 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertToXContentEquivalent(expectedBytes, parsedBytes, xContentType); } + private static UpdateResponse parseInstanceFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + + UpdateResponse.Builder context = new UpdateResponse.Builder(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + parseXContentFields(parser, context); + } + return context.build(); + } + + /** + * Parse the current token and update the parsing context appropriately. + */ + public static void parseXContentFields(XContentParser parser, UpdateResponse.Builder context) throws IOException { + XContentParser.Token token = parser.currentToken(); + String currentFieldName = parser.currentName(); + + if (UpdateResponse.GET.equals(currentFieldName)) { + if (token == XContentParser.Token.START_OBJECT) { + context.setGetResult(GetResult.fromXContentEmbedded(parser)); + } + } else { + DocWriteResponse.parseInnerToXContent(parser, context); + } + } + /** * Returns a tuple of {@link UpdateResponse}s. *

    diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java index 9ee942df1c2b0..637a18547b1b2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java @@ -26,6 +26,7 @@ import java.util.function.Predicate; import java.util.regex.Pattern; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.hamcrest.CoreMatchers.equalTo; public class ClusterIndexHealthTests extends AbstractXContentSerializingTestCase { @@ -101,7 +102,13 @@ protected Writeable.Reader instanceReader() { @Override protected ClusterIndexHealth doParseInstance(XContentParser parser) throws IOException { - return ClusterIndexHealth.fromXContent(parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String index = parser.currentName(); + ClusterIndexHealth parsed = ClusterIndexHealth.innerFromXContent(parser, index); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + return parsed; } @Override diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java index f8162eb987226..48432a0ff4958 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollResponseTests.java @@ -51,7 +51,7 @@ public class BulkByScrollResponseTests extends AbstractXContentTestCase { @@ -177,7 +184,142 @@ protected BulkByScrollTask.Status createTestInstance() { @Override protected BulkByScrollTask.Status 
doParseInstance(XContentParser parser) throws IOException { - return BulkByScrollTask.Status.fromXContent(parser); + XContentParser.Token token; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + token = parser.nextToken(); + } else { + token = parser.nextToken(); + } + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + return innerParseStatus(parser); + } + + private static final ConstructingObjectParser, Void> RETRIES_PARSER = new ConstructingObjectParser<>( + "bulk_by_scroll_task_status_retries", + true, + a -> new Tuple<>(((Long) a[0]), (Long) a[1]) + ); + static { + RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_BULK_FIELD)); + RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_SEARCH_FIELD)); + } + + public static void declareFields(ObjectParser parser) { + parser.declareInt(BulkByScrollTask.StatusBuilder::setSliceId, new ParseField(BulkByScrollTask.Status.SLICE_ID_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setTotal, new ParseField(BulkByScrollTask.Status.TOTAL_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setUpdated, new ParseField(BulkByScrollTask.Status.UPDATED_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setCreated, new ParseField(BulkByScrollTask.Status.CREATED_FIELD)); + parser.declareLong(BulkByScrollTask.StatusBuilder::setDeleted, new ParseField(BulkByScrollTask.Status.DELETED_FIELD)); + parser.declareInt(BulkByScrollTask.StatusBuilder::setBatches, new ParseField(BulkByScrollTask.Status.BATCHES_FIELD)); + parser.declareLong( + BulkByScrollTask.StatusBuilder::setVersionConflicts, + new ParseField(BulkByScrollTask.Status.VERSION_CONFLICTS_FIELD) + ); + parser.declareLong(BulkByScrollTask.StatusBuilder::setNoops, new ParseField(BulkByScrollTask.Status.NOOPS_FIELD)); + 
parser.declareObject( + BulkByScrollTask.StatusBuilder::setRetries, + RETRIES_PARSER, + new ParseField(BulkByScrollTask.Status.RETRIES_FIELD) + ); + parser.declareLong(BulkByScrollTask.StatusBuilder::setThrottled, new ParseField(BulkByScrollTask.Status.THROTTLED_RAW_FIELD)); + parser.declareFloat( + BulkByScrollTask.StatusBuilder::setRequestsPerSecond, + new ParseField(BulkByScrollTask.Status.REQUESTS_PER_SEC_FIELD) + ); + parser.declareString(BulkByScrollTask.StatusBuilder::setReasonCancelled, new ParseField(BulkByScrollTask.Status.CANCELED_FIELD)); + parser.declareLong( + BulkByScrollTask.StatusBuilder::setThrottledUntil, + new ParseField(BulkByScrollTask.Status.THROTTLED_UNTIL_RAW_FIELD) + ); + parser.declareObjectArray( + BulkByScrollTask.StatusBuilder::setSliceStatuses, + (p, c) -> parseStatusOrException(p), + new ParseField(BulkByScrollTask.Status.SLICES_FIELD) + ); + } + + private static Status innerParseStatus(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + String fieldName = parser.currentName(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + BulkByScrollTask.StatusBuilder builder = new BulkByScrollTask.StatusBuilder(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (fieldName.equals(Status.RETRIES_FIELD)) { + builder.setRetries(RETRIES_PARSER.parse(parser, null)); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (fieldName.equals(Status.SLICES_FIELD)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + builder.addToSliceStatuses(parseStatusOrException(parser)); + } + } else { + parser.skipChildren(); + } + } else { // else if it is a value + switch (fieldName) { + case Status.SLICE_ID_FIELD -> 
builder.setSliceId(parser.intValue()); + case Status.TOTAL_FIELD -> builder.setTotal(parser.longValue()); + case Status.UPDATED_FIELD -> builder.setUpdated(parser.longValue()); + case Status.CREATED_FIELD -> builder.setCreated(parser.longValue()); + case Status.DELETED_FIELD -> builder.setDeleted(parser.longValue()); + case Status.BATCHES_FIELD -> builder.setBatches(parser.intValue()); + case Status.VERSION_CONFLICTS_FIELD -> builder.setVersionConflicts(parser.longValue()); + case Status.NOOPS_FIELD -> builder.setNoops(parser.longValue()); + case Status.THROTTLED_RAW_FIELD -> builder.setThrottled(parser.longValue()); + case Status.REQUESTS_PER_SEC_FIELD -> builder.setRequestsPerSecond(parser.floatValue()); + case Status.CANCELED_FIELD -> builder.setReasonCancelled(parser.text()); + case Status.THROTTLED_UNTIL_RAW_FIELD -> builder.setThrottledUntil(parser.longValue()); + } + } + } + return builder.buildStatus(); + } + + /** + * Since {@link BulkByScrollTask.StatusOrException} can contain either an {@link Exception} or a {@link Status} we need to peek + * at a field first before deciding what needs to be parsed since the same object could contains either. + * The {@link BulkByScrollTask.StatusOrException#EXPECTED_EXCEPTION_FIELDS} contains the fields that are expected when the serialised + * object was an instance of exception and the {@link Status#FIELDS_SET} is the set of fields expected when the + * serialized object was an instance of Status. + */ + public static BulkByScrollTask.StatusOrException parseStatusOrException(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } + if (token == XContentParser.Token.VALUE_NULL) { + return null; + } else { + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + token = parser.nextToken(); + // This loop is present only to ignore unknown tokens. It breaks as soon as we find a field + // that is allowed. 
+ while (token != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String fieldName = parser.currentName(); + // weird way to ignore unknown tokens + if (Status.FIELDS_SET.contains(fieldName)) { + return new BulkByScrollTask.StatusOrException(innerParseStatus(parser)); + } else if (BulkByScrollTask.StatusOrException.EXPECTED_EXCEPTION_FIELDS.contains(fieldName)) { + return new BulkByScrollTask.StatusOrException(ElasticsearchException.innerFromXContent(parser, false)); + } else { + // Ignore unknown tokens + token = parser.nextToken(); + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + token = parser.nextToken(); + } + } + throw new XContentParseException("Unable to parse StatusFromException. Expected fields not found."); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java index f02114a48991c..bda74e75de88c 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileResultsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -114,7 +115,7 @@ protected SearchProfileResults doParseInstance(XContentParser parser) throws IOE ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD); ensureExpectedToken(XContentParser.Token.START_OBJECT, 
parser.nextToken(), parser); - SearchProfileResults result = SearchProfileResults.fromXContent(parser); + SearchProfileResults result = SearchResponseUtils.parseSearchProfileResults(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); return result; diff --git a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java index f47aaee5ff145..169379441aadd 100644 --- a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java @@ -12,11 +12,15 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -31,12 +35,46 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; public class ListTasksResponseTests extends AbstractXContentTestCase 
{ + private static ConstructingObjectParser setupParser( + String name, + TriFunction, List, List, T> ctor + ) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(name, true, constructingObjects -> { + int i = 0; + @SuppressWarnings("unchecked") + List tasks = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List tasksFailures = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List nodeFailures = (List) constructingObjects[i]; + return ctor.apply(tasks, tasksFailures, nodeFailures); + }); + parser.declareObjectArray(optionalConstructorArg(), TaskInfo.PARSER, new ParseField(ListTasksResponse.TASKS)); + parser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> TaskOperationFailure.fromXContent(p), + new ParseField(BaseTasksResponse.TASK_FAILURES) + ); + parser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField(BaseTasksResponse.NODE_FAILURES) + ); + return parser; + } + + private static final ConstructingObjectParser PARSER = setupParser( + "list_tasks_response", + ListTasksResponse::new + ); + // ListTasksResponse doesn't directly implement ToXContent because it has multiple XContent representations, so we must wrap here public record ListTasksResponseWrapper(ListTasksResponse in) implements ToXContentObject { @Override @@ -108,7 +146,7 @@ private static List randomTasks() { @Override protected ListTasksResponseWrapper doParseInstance(XContentParser parser) { - return new ListTasksResponseWrapper(ListTasksResponse.fromXContent(parser)); + return new ListTasksResponseWrapper(PARSER.apply(parser, null)); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 77b25efd56b35..71837ccf14387 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ 
b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -18,7 +18,13 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.profile.ProfileResult; +import org.elasticsearch.search.profile.SearchProfileDfsPhaseResult; +import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; +import org.elasticsearch.search.profile.SearchProfileShardResult; +import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -27,6 +33,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -186,7 +193,7 @@ public static SearchResponse parseInnerSearchResponse(XContentParser parser) thr } else if (Suggest.NAME.equals(currentFieldName)) { suggest = Suggest.fromXContent(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { - profile = SearchProfileResults.fromXContent(parser); + profile = parseSearchProfileResults(parser); } else if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -389,4 +396,73 @@ private static SearchResponse.Cluster parseCluster(String clusterAlias, XContent timedOut ); } + + public static SearchProfileResults parseSearchProfileResults(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, 
token, parser); + Map profileResults = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_ARRAY) { + if (SearchProfileResults.SHARDS_FIELD.equals(parser.currentName())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + parseProfileResultsEntry(parser, profileResults); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + } + } + return new SearchProfileResults(profileResults); + } + + private static void parseProfileResultsEntry(XContentParser parser, Map searchProfileResults) + throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + SearchProfileDfsPhaseResult searchProfileDfsPhaseResult = null; + List queryProfileResults = new ArrayList<>(); + AggregationProfileShardResult aggProfileShardResult = null; + ProfileResult fetchResult = null; + String id = null; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SearchProfileResults.ID_FIELD.equals(currentFieldName)) { + id = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("searches".equals(currentFieldName)) { + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + queryProfileResults.add(QueryProfileShardResult.fromXContent(parser)); + } + } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { + aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("dfs".equals(currentFieldName)) { + searchProfileDfsPhaseResult = 
SearchProfileDfsPhaseResult.fromXContent(parser); + } else if ("fetch".equals(currentFieldName)) { + fetchResult = ProfileResult.fromXContent(parser); + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + SearchProfileShardResult result = new SearchProfileShardResult( + new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult), + fetchResult + ); + result.getQueryPhase().setSearchProfileDfsPhaseResult(searchProfileDfsPhaseResult); + searchProfileResults.put(id, result); + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java index 749304caf6e20..97c7d6d8cb60d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponse.java @@ -12,18 +12,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; /** * The response object returned by the Explain Lifecycle API. 
@@ -37,26 +32,6 @@ public class ExplainLifecycleResponse extends ActionResponse implements ToXConte private Map indexResponses; - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "explain_lifecycle_response", - a -> new ExplainLifecycleResponse( - ((List) a[0]).stream() - .collect(Collectors.toMap(IndexLifecycleExplainResponse::getIndex, Function.identity())) - ) - ); - static { - PARSER.declareNamedObjects( - ConstructingObjectParser.constructorArg(), - (p, c, n) -> IndexLifecycleExplainResponse.PARSER.apply(p, c), - INDICES_FIELD - ); - } - - public static ExplainLifecycleResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - public ExplainLifecycleResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index 67c5e22902cf2..f06ba16d9da78 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -42,7 +41,6 @@ import java.util.Objects; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class PreviewTransformAction extends ActionType { @@ -154,26 +152,6 @@ public static class Response 
extends ActionResponse implements ToXContentObject private final List> docs; private final TransformDestIndexSettings generatedDestIndexSettings; - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_transform_preview", - true, - args -> { - @SuppressWarnings("unchecked") - List> docs = (List>) args[0]; - TransformDestIndexSettings generatedDestIndex = (TransformDestIndexSettings) args[1]; - - return new Response(docs, generatedDestIndex); - } - ); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PREVIEW); - PARSER.declareObject( - optionalConstructorArg(), - (p, c) -> TransformDestIndexSettings.fromXContent(p), - GENERATED_DEST_INDEX_SETTINGS - ); - } - public Response(List> docs, TransformDestIndexSettings generatedDestIndexSettings) { this.docs = docs; this.generatedDestIndexSettings = generatedDestIndexSettings; @@ -237,9 +215,5 @@ public int hashCode() { public String toString() { return Strings.toString(this, true, true); } - - public static Response fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java index ab084e66c3ad1..937502281b64d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; 
import org.elasticsearch.xcontent.XContentParser; @@ -19,10 +20,29 @@ import java.io.IOException; import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; public class ExplainLifecycleResponseTests extends AbstractXContentSerializingTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "explain_lifecycle_response", + a -> new ExplainLifecycleResponse( + ((List) a[0]).stream() + .collect(Collectors.toMap(IndexLifecycleExplainResponse::getIndex, Function.identity())) + ) + ); + static { + PARSER.declareNamedObjects( + ConstructingObjectParser.constructorArg(), + (p, c, n) -> IndexLifecycleExplainResponse.PARSER.apply(p, c), + ExplainLifecycleResponse.INDICES_FIELD + ); + } + @Override protected ExplainLifecycleResponse createTestInstance() { Map indexResponses = new HashMap<>(); @@ -51,7 +71,7 @@ protected ExplainLifecycleResponse mutateInstance(ExplainLifecycleResponse respo @Override protected ExplainLifecycleResponse doParseInstance(XContentParser parser) throws IOException { - return ExplainLifecycleResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java index 9613fc83efd50..9a573818fb111 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformsActionResponseTests.java @@ -9,8 +9,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction.Response; +import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettings; import org.elasticsearch.xpack.core.transform.transforms.TransformDestIndexSettingsTests; import java.io.IOException; @@ -18,8 +20,31 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class PreviewTransformsActionResponseTests extends AbstractXContentSerializingTestCase { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "data_frame_transform_preview", + true, + args -> { + @SuppressWarnings("unchecked") + List> docs = (List>) args[0]; + TransformDestIndexSettings generatedDestIndex = (TransformDestIndexSettings) args[1]; + + return new Response(docs, generatedDestIndex); + } + ); + + static { + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> p.mapOrdered(), PreviewTransformAction.Response.PREVIEW); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> TransformDestIndexSettings.fromXContent(p), + PreviewTransformAction.Response.GENERATED_DEST_INDEX_SETTINGS + ); + } + public static Response randomPreviewResponse() { int size = randomIntBetween(0, 10); List> data = new ArrayList<>(size); @@ -32,7 +57,7 @@ public static Response randomPreviewResponse() { @Override protected Response doParseInstance(XContentParser parser) throws IOException { - return Response.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 8a544f735b570..659c58d2bd1b8 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -146,20 +146,5 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IO builder.field(COLLECTION_NAME_FIELD.getPreferredName(), name); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "put_analytics_collection_response", - false, - (p) -> { - return new Response((boolean) p[0], (String) p[1]); - } - ); - static { - PARSER.declareString(constructorArg(), COLLECTION_NAME_FIELD); - } - - public static Response fromXContent(String resourceName, XContentParser parser) throws IOException { - return new Response(AcknowledgedResponse.fromXContent(parser).isAcknowledged(), resourceName); - } - } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java index ce6259b40765e..f6a13477acae7 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionResponseBWCSerializingTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.analytics.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; @@ -40,7 +41,7 @@ protected 
PutAnalyticsCollectionAction.Response mutateInstance(PutAnalyticsColle @Override protected PutAnalyticsCollectionAction.Response doParseInstance(XContentParser parser) throws IOException { - return PutAnalyticsCollectionAction.Response.fromXContent(this.name, parser); + return new PutAnalyticsCollectionAction.Response(AcknowledgedResponse.fromXContent(parser).isAcknowledged(), this.name); } @Override From d0c7da6bb7f9ff3373c71d8b9b89ac4d877f1660 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 22 Mar 2024 11:32:56 +0000 Subject: [PATCH 126/214] Improves Error Handling in Chunked Inference API (#106634) Previously, if one input failed within a batch call to the chunked inference API, the entire request was labeled as failed. This update alters this process, now allowing individual errors to be conveyed in the response. This empowers the caller to address failures specifically and understand the root cause, rather than receiving a vague error message like: 'Expected a chunked inference [chunked_text_embedding_service_results] received [error]'. 
--- .../results/ErrorChunkedInferenceResults.java | 97 +++++++++++++++++++ .../InferenceNamedWriteablesProvider.java | 8 ++ .../ElasticsearchInternalService.java | 4 + .../services/elser/ElserInternalService.java | 4 + .../ErrorChunkedInferenceResultsTests.java | 43 ++++++++ .../ElasticsearchInternalServiceTests.java | 9 +- .../elser/ElserInternalServiceTests.java | 8 +- 7 files changed, 171 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ErrorChunkedInferenceResultsTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java new file mode 100644 index 0000000000000..eef864f2e8992 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ErrorChunkedInferenceResults.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class ErrorChunkedInferenceResults implements ChunkedInferenceServiceResults { + + public static final String NAME = "error_chunked"; + + private final Exception exception; + + public ErrorChunkedInferenceResults(Exception exception) { + this.exception = Objects.requireNonNull(exception); + } + + public ErrorChunkedInferenceResults(StreamInput in) throws IOException { + this.exception = in.readException(); + } + + public Exception getException() { + return exception; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeException(exception); + } + + @Override + public boolean equals(Object object) { + if (object == this) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + ErrorChunkedInferenceResults that = (ErrorChunkedInferenceResults) object; + // Just compare the message for serialization test purposes + return Objects.equals(exception.getMessage(), that.exception.getMessage()); + } + + @Override + public int hashCode() { + // Just compare the message for serialization test purposes + return Objects.hash(exception.getMessage()); + } + + @Override + public List transformToCoordinationFormat() { + return null; + } + + @Override + public List transformToLegacyFormat() { + return null; + } + + @Override + public Map asMap() { + Map asMap = new LinkedHashMap<>(); + asMap.put(NAME, exception.getMessage()); + return asMap; + } + + @Override + public 
String toString() { + return Strings.toString(this); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(NAME, exception.getMessage()); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index c38b427200744..80fd98c40516e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -16,6 +16,7 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; @@ -62,6 +63,13 @@ public static List getNamedWriteables() { ); // Chunked inference results + namedWriteables.add( + new NamedWriteableRegistry.Entry( + InferenceServiceResults.class, + ErrorChunkedInferenceResults.NAME, + ErrorChunkedInferenceResults::new + ) + ); namedWriteables.add( new NamedWriteableRegistry.Entry( InferenceServiceResults.class, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 1aafa340268f3..a07ebe56a9258 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -28,6 +28,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; @@ -36,6 +37,7 @@ import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; @@ -382,6 +384,8 @@ private List translateChunkedResults(List translateChunkedResults(List { + + public static ErrorChunkedInferenceResults createRandomResults() { + return new ErrorChunkedInferenceResults( + randomBoolean() + ? 
new ElasticsearchTimeoutException(randomAlphaOfLengthBetween(10, 50)) + : new ElasticsearchStatusException(randomAlphaOfLengthBetween(10, 50), randomFrom(RestStatus.values())) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return ErrorChunkedInferenceResults::new; + } + + @Override + protected ErrorChunkedInferenceResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected ErrorChunkedInferenceResults mutateInstance(ErrorChunkedInferenceResults instance) throws IOException { + return new ErrorChunkedInferenceResults(new RuntimeException(randomAlphaOfLengthBetween(10, 50))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 4f0deaceb17da..0757012b234bd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -24,8 +24,10 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.util.ArrayList; @@ -37,6 +39,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; 
+import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; @@ -346,6 +349,7 @@ public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); mlTrainedModelResults.add(ChunkedTextEmbeddingResultsTests.createRandomResults()); + mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults); ThreadPool threadpool = new TestThreadPool("test"); @@ -372,7 +376,7 @@ public void testChunkInfer() { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { - assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse, hasSize(3)); assertThat(chunkedResponse.get(0), instanceOf(ChunkedTextEmbeddingResults.class)); var result1 = (ChunkedTextEmbeddingResults) chunkedResponse.get(0); assertEquals( @@ -385,6 +389,9 @@ public void testChunkInfer() { ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunks() ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); + assertThat(result3.getException(), instanceOf(RuntimeException.class)); + assertThat(result3.getException().getMessage(), containsString("boom")); gotResults.set(true); }, ESTestCase::fail); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index 6da634afddeb0..f2fd195ab8c5a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -23,8 +23,10 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import java.util.ArrayList; import java.util.Collections; @@ -330,6 +332,7 @@ public void testChunkInfer() { var mlTrainedModelResults = new ArrayList(); mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); mlTrainedModelResults.add(ChunkedTextExpansionResultsTests.createRandomResults()); + mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferTrainedModelDeploymentAction.Response(mlTrainedModelResults); ThreadPool threadpool = new TestThreadPool("test"); @@ -357,7 +360,7 @@ public void testChunkInfer() { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { - assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse, hasSize(3)); assertThat(chunkedResponse.get(0), instanceOf(ChunkedSparseEmbeddingResults.class)); var result1 = (ChunkedSparseEmbeddingResults) chunkedResponse.get(0); assertEquals( @@ -370,6 +373,9 @@ public void testChunkInfer() { ((org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunkedResults() ); + var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); + assertThat(result3.getException(), instanceOf(RuntimeException.class)); + 
assertThat(result3.getException().getMessage(), containsString("boom")); gotResults.set(true); }, ESTestCase::fail); From f3f117a3646cb5ca20c58ec0a7d077ebc664dbc7 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 22 Mar 2024 12:52:30 +0100 Subject: [PATCH 127/214] [DOCS] Update params for Update Connector Filtering API (#106662) --- .../apis/update-connector-filtering-api.asciidoc | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc index 3e81f0fda2ce7..04c40ebf9fa4e 100644 --- a/docs/reference/connector/apis/update-connector-filtering-api.asciidoc +++ b/docs/reference/connector/apis/update-connector-filtering-api.asciidoc @@ -55,32 +55,32 @@ Contains the set of rules that are actively used for sync jobs. The `active` obj The value to be used in conjunction with the rule for matching the contents of the document's field. ** `order` (Required, number) + The order in which the rules are applied. The first rule to match has its policy applied. - ** `created_at` (Optional, datetime) + + ** `created_at` (Required, datetime) + The timestamp when the rule was added. - ** `updated_at` (Optional, datetime) + + ** `updated_at` (Required, datetime) + The timestamp when the rule was last edited. - * `advanced_snippet` (Optional, object) + + * `advanced_snippet` (Required, object) + Used for {enterprise-search-ref}/sync-rules.html#sync-rules-advanced[advanced filtering] at query time, with the following sub-attributes: ** `value` (Required, object) + A JSON object passed directly to the connector for advanced filtering. - ** `created_at` (Optional, datetime) + + ** `created_at` (Required, datetime) + The timestamp when this JSON object was created. 
- ** `updated_at` (Optional, datetime) + + ** `updated_at` (Required, datetime) + The timestamp when this JSON object was last edited. - * `validation` (Optional, object) + + * `validation` (Required, object) + Provides validation status for the rules, including: ** `state` (Required, string) + Indicates the validation state: "edited", "valid", or "invalid". - ** `errors` (Optional, object) + + ** `errors` (Required, object) + Contains details about any validation errors, with sub-attributes: *** `ids` (Required, string) + The ID(s) of any rules deemed invalid. *** `messages` (Required, string) + Messages explaining what is invalid about the rules. -- `draft` (Optional, object) + +- `draft` (Required, object) + An object identical in structure to the `active` object, but used for drafting and editing filtering rules before they become active. From 16787c2b021e67f2b8e60ddaf8802d45fc1a8459 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 22 Mar 2024 13:14:07 +0100 Subject: [PATCH 128/214] [Profiling] Fix spurious test failures (#106660) With this commit we force-merge indices to work around elastic/elasticsearch#106657. We also revert a test mute that was applied to the wrong test case so all integration tests are executed again.
Relates #106309 Relates #106657 Closes #106308 --- .../elasticsearch/xpack/profiling/GetStackTracesActionIT.java | 1 - .../org/elasticsearch/xpack/profiling/ProfilingTestCase.java | 3 +++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 501d564bbda0d..62b8242e7df86 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -42,7 +42,6 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals("vmlinux", response.getExecutables().get("lHp5_WAgpLy2alrUVab6HA")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106308") public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception { BoolQueryBuilder query = QueryBuilders.boolQuery(); query.must().add(QueryBuilders.termQuery("transaction.name", "encodeSha1")); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index 383f46c97f02f..58b018a13e096 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -143,6 +143,9 @@ protected final void doSetupData() throws Exception { bulkIndex("data/apm-legacy-test.ndjson"); refresh(); + + // temporary workaround for #106657, see also #106308. 
+ forceMerge(); } @After From 46b5596767d8f267e0e29cce50673d34993325e9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Mar 2024 12:47:47 +0000 Subject: [PATCH 129/214] Avoid double-sorting results in TransportGetSnapshotsAction (#106644) Today we call `sortSnapshots` twice, once for the per-repository results, and then again for the combined results. The first call has no limit or offset, and the sorting is made redundant by the second call, so only really serves to filter out snapshots which do not match the `?after` parameter. Meanwhile the `?after` parameter filtering is redundant in the second call. This commit separates these two steps to avoid the redundant sorting in the first step and the redundant filtering in the second. --- .../get/TransportGetSnapshotsAction.java | 49 ++++++++++--------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 898adf721be33..6d29c36bdcda1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -164,8 +164,7 @@ private class GetSnapshotsOperation { @Nullable private final String fromSortValue; private final int offset; - @Nullable - private final SnapshotSortKey.After after; + private final Predicate afterPredicate; private final int size; // current state @@ -210,7 +209,6 @@ private class GetSnapshotsOperation { this.order = order; this.fromSortValue = fromSortValue; this.offset = offset; - this.after = after; this.size = size; this.snapshotsInProgress = snapshotsInProgress; this.verbose = verbose; @@ -219,6 +217,7 @@ private class GetSnapshotsOperation { this.snapshotNamePredicate = 
SnapshotNamePredicate.forSnapshots(ignoreUnavailable, snapshots); this.fromSortValuePredicates = SnapshotPredicates.forFromSortValue(fromSortValue, sortBy, order); this.slmPolicyPredicate = SlmPolicyPredicate.forPolicies(policies); + this.afterPredicate = sortBy.getAfterPredicate(after, order); this.getSnapshotInfoExecutor = new GetSnapshotInfoExecutor( threadPool.info(ThreadPool.Names.SNAPSHOT_META).getMax(), @@ -344,20 +343,15 @@ private void loadSnapshotInfos(String repo, @Nullable RepositoryData repositoryD } else { assert fromSortValuePredicates.isMatchAll() : "filtering is not supported in non-verbose mode"; assert slmPolicyPredicate == SlmPolicyPredicate.MATCH_ALL_POLICIES : "filtering is not supported in non-verbose mode"; - final var currentSnapshots = snapshotsInProgress.forRepo(repo) - .stream() - .map(entry -> SnapshotInfo.inProgress(entry).basic()) - .toList(); - - final SnapshotsInRepo snapshotInfos; - if (repositoryData != null) { - // want non-current snapshots as well, which are found in the repository data - snapshotInfos = buildSimpleSnapshotInfos(toResolve, repo, repositoryData, currentSnapshots); - } else { - // only want current snapshots - snapshotInfos = sortSnapshotsWithNoOffsetOrLimit(currentSnapshots); - } - listener.onResponse(snapshotInfos); + + listener.onResponse( + buildSimpleSnapshotInfos( + toResolve, + repo, + repositoryData, + snapshotsInProgress.forRepo(repo).stream().map(entry -> SnapshotInfo.inProgress(entry).basic()).toList() + ) + ); } } @@ -446,7 +440,7 @@ public void onFailure(Exception e) { .addListener(listener.safeMap(v -> // no need to synchronize access to snapshots: Repository#getSnapshotInfo fails fast but we're on the success path here - sortSnapshotsWithNoOffsetOrLimit(snapshots)), executor, threadPool.getThreadContext()); + applyAfterPredicate(snapshots)), executor, threadPool.getThreadContext()); } private SnapshotsInRepo buildSimpleSnapshotInfos( @@ -455,6 +449,11 @@ private SnapshotsInRepo 
buildSimpleSnapshotInfos( final RepositoryData repositoryData, final List currentSnapshots ) { + if (repositoryData == null) { + // only want current snapshots + return applyAfterPredicate(currentSnapshots); + } // else want non-current snapshots as well, which are found in the repository data + List snapshotInfos = new ArrayList<>(); for (SnapshotInfo snapshotInfo : currentSnapshots) { assert snapshotInfo.startTime() == 0L && snapshotInfo.endTime() == 0L && snapshotInfo.totalShards() == 0L : snapshotInfo; @@ -483,16 +482,16 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( ) ); } - return sortSnapshotsWithNoOffsetOrLimit(snapshotInfos); + return applyAfterPredicate(snapshotInfos); } - private SnapshotsInRepo sortSnapshotsWithNoOffsetOrLimit(List snapshotInfos) { - return sortSnapshots(snapshotInfos.stream(), snapshotInfos.size(), 0, GetSnapshotsRequest.NO_LIMIT); + private SnapshotsInRepo applyAfterPredicate(List snapshotInfos) { + return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size(), 0); } private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - final var resultsStream = snapshotInfoStream.filter(sortBy.getAfterPredicate(after, order)) + final var resultsStream = snapshotInfoStream.peek(this::assertSatisfiesAllPredicates) .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); if (size == GetSnapshotsRequest.NO_LIMIT) { @@ -513,6 +512,12 @@ private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, i } } + private void assertSatisfiesAllPredicates(SnapshotInfo snapshotInfo) { + assert matchesPredicates(snapshotInfo); + assert afterPredicate.test(snapshotInfo); + assert indices || snapshotInfo.indices().isEmpty(); + } + private boolean matchesPredicates(SnapshotId snapshotId, RepositoryData repositoryData) { if (fromSortValuePredicates.test(snapshotId, repositoryData) == false) 
{ return false; From 391f010f9b5b111efbe649602733dedf8555a182 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 22 Mar 2024 14:01:31 +0100 Subject: [PATCH 130/214] ES|QL: Fix usage of IN operator with TEXT fields (#106654) --- docs/changelog/106654.yaml | 6 ++ .../predicate/operator/comparison/In.java | 4 +- .../xpack/esql/analysis/AnalyzerTests.java | 26 +++++++ .../rest-api-spec/test/esql/80_text.yml | 76 +++++++++++++++++++ 4 files changed, 110 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/106654.yaml diff --git a/docs/changelog/106654.yaml b/docs/changelog/106654.yaml new file mode 100644 index 0000000000000..3443b68482443 --- /dev/null +++ b/docs/changelog/106654.yaml @@ -0,0 +1,6 @@ +pr: 106654 +summary: "ES|QL: Fix usage of IN operator with TEXT fields" +area: ES|QL +type: bug +issues: + - 105379 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index b20160ac936d6..ab2f9079b610c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; +import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.InProcessor; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -67,7 +67,7 @@ protected boolean areCompatible(DataType 
left, DataType right) { @Override protected TypeResolution resolveType() { // TODO: move the foldability check from QL's In to SQL's and remove this method - TypeResolution resolution = TypeResolutions.isExact(value(), functionName(), DEFAULT); + TypeResolution resolution = EsqlTypeResolutions.isExact(value(), functionName(), DEFAULT); if (resolution.unresolved()) { return resolution; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 975b31b967fe0..543e7c93526d2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1775,6 +1775,32 @@ public void testUnsupportedTypesInStats() { ); } + public void testInOnText() { + assertProjectionWithMapping(""" + from a_index + | eval text in (\"a\", \"b\", \"c\") + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text in (\"a\", \"b\", \"c\", text) + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text not in (\"a\", \"b\", \"c\") + | keep text + """, "mapping-multi-field-variation.json", "text"); + + assertProjectionWithMapping(""" + from a_index + | eval text not in (\"a\", \"b\", \"c\", text) + | keep text + """, "mapping-multi-field-variation.json", "text"); + } + private void verifyUnsupported(String query, String errorMessage) { verifyUnsupported(query, errorMessage, "mapping-multi-field-variation.json"); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 09462691688bf..d73efe1788ce3 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -121,6 +121,82 @@ setup: - length: { values: 1 } - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } +--- +"IN on text": + - skip: + version: " - 8.13.99" + reason: "IN on text fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag IN ("abc", "baz") | keep emp_no, name, job, tag' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + +--- +"IN on text and itself": + - skip: + version: " - 8.13.99" + reason: "IN on text fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag IN ("abc", tag) | keep emp_no, name, job, tag | sort emp_no' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 2 } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + +--- +"NOT IN on text": + - skip: + version: " - 8.13.99" + reason: "IN on text fixed in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - 
"No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | where tag NOT IN ("abc", "baz") | keep emp_no, name, job, tag' + + - match: { columns.0.name: "emp_no" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "job" } + - match: { columns.2.type: "text" } + - match: { columns.3.name: "tag" } + - match: { columns.3.type: "text" } + + - length: { values: 1 } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + --- "eval and filter text": - do: From bd6f3c7f78c7577a8fabd945f3cefb6f11ed28be Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 22 Mar 2024 09:18:43 -0400 Subject: [PATCH 131/214] Test mute for issue #106647 (#106670) --- .../search/query/PartialHitCountCollectorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java index c7967f0de5411..c63e2499147c1 100644 --- a/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/PartialHitCountCollectorTests.java @@ -118,6 +118,7 @@ public void testHitCountFromWeightDoesNotEarlyTerminate() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106647") public void testCollectedHitCount() throws Exception { Query query = new NonCountingTermQuery(new Term("string", "a1")); int threshold = randomIntBetween(1, 10000); From f02adacceae8d44665f781b0c78331ac80140f62 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Fri, 22 Mar 2024 13:38:21 +0000 Subject: [PATCH 132/214] Fix testDataStreamLifecycleDownsampleRollingRestart (#106664) This removes a redundant thread creation when triggering a rolling restart as the method is already async and drops the check for 
cluster health as that might hit a node that's being shut down (the master node in particular) The test assertions still hold i.e. the successful downsampling of the source index --- ...StreamLifecycleDownsampleDisruptionIT.java | 84 +------------------ 1 file changed, 4 insertions(+), 80 deletions(-) diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index 65a4d84e921a2..afa2e95e1284c 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -31,9 +31,7 @@ import java.util.Collection; import java.util.List; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; import static org.elasticsearch.xpack.downsample.DataStreamLifecycleDriver.getBackingIndices; import static org.elasticsearch.xpack.downsample.DataStreamLifecycleDriver.putTSDBIndexTemplate; @@ -57,11 +55,10 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105577") @TestLogging(value = "org.elasticsearch.datastreams.lifecycle:TRACE", reason = "debugging") public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { final InternalTestCluster cluster = internalCluster(); - final List masterNodes = cluster.startMasterOnlyNodes(1); + cluster.startMasterOnlyNodes(1); cluster.startDataOnlyNodes(3); ensureStableCluster(cluster.size()); ensureGreen(); @@ -99,36 +96,12 @@ public void testDataStreamLifecycleDownsampleRollingRestart() throws 
Exception { long sleepTime = randomLongBetween(3000, 4500); logger.info("-> giving data stream lifecycle [{}] millis to make some progress before starting the disruption", sleepTime); Thread.sleep(sleepTime); - final CountDownLatch disruptionStart = new CountDownLatch(1); - final CountDownLatch disruptionEnd = new CountDownLatch(1); List backingIndices = getBackingIndices(client(), dataStreamName); // first generation index String sourceIndex = backingIndices.get(0); - new Thread(new Disruptor(cluster, sourceIndex, new DisruptionListener() { - @Override - public void disruptionStart() { - disruptionStart.countDown(); - } - @Override - public void disruptionEnd() { - disruptionEnd.countDown(); - } - }, masterNodes.get(0), (ignored) -> { - try { - cluster.rollingRestart(new InternalTestCluster.RestartCallback() { - @Override - public boolean validateClusterForming() { - return true; - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - })).start(); - - waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty(), 60, TimeUnit.SECONDS); - ensureStableCluster(cluster.numDataAndMasterNodes()); + internalCluster().rollingRestart(new InternalTestCluster.RestartCallback() { + }); // if the source index has already been downsampled and moved into the data stream just use its name directly final String targetIndex = sourceIndex.startsWith("downsample-5m-") ? 
sourceIndex : "downsample-5m-" + sourceIndex; @@ -147,55 +120,6 @@ public boolean validateClusterForming() { throw new AssertionError(e); } }, 60, TimeUnit.SECONDS); - } - - interface DisruptionListener { - void disruptionStart(); - - void disruptionEnd(); - } - - private class Disruptor implements Runnable { - final InternalTestCluster cluster; - private final String sourceIndex; - private final DisruptionListener listener; - private final String clientNode; - private final Consumer disruption; - - private Disruptor( - final InternalTestCluster cluster, - final String sourceIndex, - final DisruptionListener listener, - final String clientNode, - final Consumer disruption - ) { - this.cluster = cluster; - this.sourceIndex = sourceIndex; - this.listener = listener; - this.clientNode = clientNode; - this.disruption = disruption; - } - - @Override - public void run() { - listener.disruptionStart(); - try { - final String candidateNode = cluster.client(clientNode) - .admin() - .cluster() - .prepareSearchShards(sourceIndex) - .get() - .getNodes()[0].getName(); - logger.info("Candidate node [" + candidateNode + "]"); - disruption.accept(candidateNode); - ensureGreen(sourceIndex); - ensureStableCluster(cluster.numDataAndMasterNodes(), clientNode); - - } catch (Exception e) { - logger.error("Ignoring Error while injecting disruption [" + e.getMessage() + "]"); - } finally { - listener.disruptionEnd(); - } - } + ensureGreen(targetIndex); } } From 9a907704b7300f3364d74047a92e79a265952c6c Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Mar 2024 13:46:46 +0000 Subject: [PATCH 133/214] Move `XContent` -> `SnapshotInfo` parsing out of prod (#106669) The code to parse a `SnapshotInfo` object out of an `XContent` response body is only used in tests, so this commit moves it out of the production codebase and into the test framework. 
--- .../http/snapshots/RestGetSnapshotsIT.java | 3 +- .../create/CreateSnapshotResponse.java | 30 +- .../create/TransportCreateSnapshotAction.java | 3 +- .../elasticsearch/snapshots/SnapshotInfo.java | 258 ++---------------- .../create/CreateSnapshotResponseTests.java | 3 +- .../snapshots/SnapshotInfoUtils.java | 244 +++++++++++++++++ .../xpack/slm/SnapshotLifecycleTaskTests.java | 5 +- 7 files changed, 285 insertions(+), 261 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/snapshots/SnapshotInfoUtils.java diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java index 88d910b61fa52..b12a70ccb8425 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/snapshots/RestGetSnapshotsIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -524,7 +525,7 @@ private static GetSnapshotsResponse sortedWithLimit( static { GET_SNAPSHOT_PARSER.declareObjectArray( ConstructingObjectParser.constructorArg(), - (p, c) -> SnapshotInfo.SNAPSHOT_INFO_PARSER.apply(p, c).build(), + (p, c) -> SnapshotInfoUtils.snapshotInfoFromXContent(p), new ParseField("snapshots") ); GET_SNAPSHOT_PARSER.declareObject( diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index 
7062efd301991..4e04506d03d6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -14,12 +14,8 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotInfo; -import org.elasticsearch.snapshots.SnapshotInfo.SnapshotInfoBuilder; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; @@ -29,24 +25,8 @@ */ public class CreateSnapshotResponse extends ActionResponse implements ToXContentObject { - private static final ObjectParser PARSER = new ObjectParser<>( - CreateSnapshotResponse.class.getName(), - true, - CreateSnapshotResponse::new - ); - - static { - PARSER.declareObject( - CreateSnapshotResponse::setSnapshotInfoFromBuilder, - SnapshotInfo.SNAPSHOT_INFO_PARSER, - new ParseField("snapshot") - ); - } - @Nullable - private SnapshotInfo snapshotInfo; - - CreateSnapshotResponse() {} + private final SnapshotInfo snapshotInfo; public CreateSnapshotResponse(@Nullable SnapshotInfo snapshotInfo) { this.snapshotInfo = snapshotInfo; @@ -57,10 +37,6 @@ public CreateSnapshotResponse(StreamInput in) throws IOException { snapshotInfo = in.readOptionalWriteable(SnapshotInfo::readFrom); } - private void setSnapshotInfoFromBuilder(SnapshotInfoBuilder snapshotInfoBuilder) { - this.snapshotInfo = snapshotInfoBuilder.build(); - } - /** * Returns snapshot information if snapshot was completed by the time this method returned or null otherwise. 
* @@ -103,10 +79,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static CreateSnapshotResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public String toString() { return "CreateSnapshotResponse{" + "snapshotInfo=" + snapshotInfo + '}'; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java index 8d776b7ae6ecb..02592b722c9e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/TransportCreateSnapshotAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -68,7 +69,7 @@ protected void masterOperation( if (request.waitForCompletion()) { snapshotsService.executeSnapshot(request, listener.map(CreateSnapshotResponse::new)); } else { - snapshotsService.createSnapshot(request, listener.map(snapshot -> new CreateSnapshotResponse())); + snapshotsService.createSnapshot(request, listener.map(snapshot -> new CreateSnapshotResponse((SnapshotInfo) null))); } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 243df88cfab00..8a1f68c867943 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -23,7 +23,6 @@ 
import org.elasticsearch.repositories.RepositoryShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentFragment; @@ -53,237 +52,40 @@ public final class SnapshotInfo implements Comparable, ToXContentF public static final String INCLUDE_REPOSITORY_XCONTENT_PARAM = "include_repository"; private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time"); - private static final String SNAPSHOT = "snapshot"; - private static final String UUID = "uuid"; - private static final String REPOSITORY = "repository"; - private static final String INDICES = "indices"; - private static final String DATA_STREAMS = "data_streams"; - private static final String STATE = "state"; - private static final String REASON = "reason"; - private static final String START_TIME = "start_time"; - private static final String START_TIME_IN_MILLIS = "start_time_in_millis"; - private static final String END_TIME = "end_time"; - private static final String END_TIME_IN_MILLIS = "end_time_in_millis"; - private static final String DURATION = "duration"; - private static final String DURATION_IN_MILLIS = "duration_in_millis"; - private static final String FAILURES = "failures"; - private static final String SHARDS = "shards"; - private static final String TOTAL = "total"; - private static final String FAILED = "failed"; - private static final String SUCCESSFUL = "successful"; - private static final String VERSION_ID = "version_id"; - private static final String VERSION = "version"; - private static final String NAME = "name"; - private static final String TOTAL_SHARDS = "total_shards"; - private static final String SUCCESSFUL_SHARDS = "successful_shards"; - private static final String INCLUDE_GLOBAL_STATE = 
"include_global_state"; - private static final String USER_METADATA = "metadata"; - private static final String FEATURE_STATES = "feature_states"; - private static final String INDEX_DETAILS = "index_details"; - - private static final String UNKNOWN_REPO_NAME = "_na_"; + + static final String SNAPSHOT = "snapshot"; + static final String UUID = "uuid"; + static final String REPOSITORY = "repository"; + static final String INDICES = "indices"; + static final String DATA_STREAMS = "data_streams"; + static final String STATE = "state"; + static final String REASON = "reason"; + static final String START_TIME = "start_time"; + static final String START_TIME_IN_MILLIS = "start_time_in_millis"; + static final String END_TIME = "end_time"; + static final String END_TIME_IN_MILLIS = "end_time_in_millis"; + static final String DURATION = "duration"; + static final String DURATION_IN_MILLIS = "duration_in_millis"; + static final String FAILURES = "failures"; + static final String SHARDS = "shards"; + static final String TOTAL = "total"; + static final String FAILED = "failed"; + static final String SUCCESSFUL = "successful"; + static final String VERSION_ID = "version_id"; + static final String VERSION = "version"; + static final String NAME = "name"; + static final String TOTAL_SHARDS = "total_shards"; + static final String SUCCESSFUL_SHARDS = "successful_shards"; + static final String INCLUDE_GLOBAL_STATE = "include_global_state"; + static final String USER_METADATA = "metadata"; + static final String FEATURE_STATES = "feature_states"; + static final String INDEX_DETAILS = "index_details"; + + static final String UNKNOWN_REPO_NAME = "_na_"; private static final Comparator COMPARATOR = Comparator.comparing(SnapshotInfo::startTime) .thenComparing(SnapshotInfo::snapshotId); - public static final class SnapshotInfoBuilder { - private String snapshotName = null; - private String snapshotUUID = null; - private String repository = UNKNOWN_REPO_NAME; - private String state = null; 
- private String reason = null; - private List indices = null; - private List dataStreams = null; - private List featureStates = null; - private Map indexSnapshotDetails = null; - private long startTime = 0L; - private long endTime = 0L; - private ShardStatsBuilder shardStatsBuilder = null; - private Boolean includeGlobalState = null; - private Map userMetadata = null; - private int version = -1; - private List shardFailures = null; - - private void setSnapshotName(String snapshotName) { - this.snapshotName = snapshotName; - } - - private void setSnapshotUUID(String snapshotUUID) { - this.snapshotUUID = snapshotUUID; - } - - private void setRepository(String repository) { - this.repository = repository; - } - - private void setState(String state) { - this.state = state; - } - - private void setReason(String reason) { - this.reason = reason; - } - - private void setIndices(List indices) { - this.indices = indices; - } - - private void setDataStreams(List dataStreams) { - this.dataStreams = dataStreams; - } - - private void setFeatureStates(List featureStates) { - this.featureStates = featureStates; - } - - private void setIndexSnapshotDetails(Map indexSnapshotDetails) { - this.indexSnapshotDetails = indexSnapshotDetails; - } - - private void setStartTime(long startTime) { - this.startTime = startTime; - } - - private void setEndTime(long endTime) { - this.endTime = endTime; - } - - private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { - this.shardStatsBuilder = shardStatsBuilder; - } - - private void setIncludeGlobalState(Boolean includeGlobalState) { - this.includeGlobalState = includeGlobalState; - } - - private void setUserMetadata(Map userMetadata) { - this.userMetadata = userMetadata; - } - - private void setVersion(int version) { - this.version = version; - } - - private void setShardFailures(List shardFailures) { - this.shardFailures = shardFailures; - } - - public SnapshotInfo build() { - final Snapshot snapshot = new Snapshot(repository, 
new SnapshotId(snapshotName, snapshotUUID)); - - if (indices == null) { - indices = Collections.emptyList(); - } - - if (dataStreams == null) { - dataStreams = Collections.emptyList(); - } - - if (featureStates == null) { - featureStates = Collections.emptyList(); - } - - if (indexSnapshotDetails == null) { - indexSnapshotDetails = Collections.emptyMap(); - } - - SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); - IndexVersion version = this.version == -1 ? IndexVersion.current() : IndexVersion.fromId(this.version); - - int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getTotalShards(); - int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getSuccessfulShards(); - - if (shardFailures == null) { - shardFailures = new ArrayList<>(); - } - - return new SnapshotInfo( - snapshot, - indices, - dataStreams, - featureStates, - reason, - version, - startTime, - endTime, - totalShards, - successfulShards, - shardFailures, - includeGlobalState, - userMetadata, - snapshotState, - indexSnapshotDetails - ); - } - } - - private static final class ShardStatsBuilder { - private int totalShards; - private int successfulShards; - - private void setTotalShards(int totalShards) { - this.totalShards = totalShards; - } - - int getTotalShards() { - return totalShards; - } - - private void setSuccessfulShards(int successfulShards) { - this.successfulShards = successfulShards; - } - - int getSuccessfulShards() { - return successfulShards; - } - } - - public static final ObjectParser SNAPSHOT_INFO_PARSER = new ObjectParser<>( - SnapshotInfoBuilder.class.getName(), - true, - SnapshotInfoBuilder::new - ); - - private static final ObjectParser SHARD_STATS_PARSER = new ObjectParser<>( - ShardStatsBuilder.class.getName(), - true, - ShardStatsBuilder::new - ); - - static { - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); - 
SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setRepository, new ParseField(REPOSITORY)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); - SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); - SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); - SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setDataStreams, new ParseField(DATA_STREAMS)); - SNAPSHOT_INFO_PARSER.declareObjectArray( - SnapshotInfoBuilder::setFeatureStates, - SnapshotFeatureInfo.SNAPSHOT_FEATURE_INFO_PARSER, - new ParseField(FEATURE_STATES) - ); - SNAPSHOT_INFO_PARSER.declareObject( - SnapshotInfoBuilder::setIndexSnapshotDetails, - (p, c) -> p.map(HashMap::new, p2 -> IndexSnapshotDetails.PARSER.parse(p2, c)), - new ParseField(INDEX_DETAILS) - ); - SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); - SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); - SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); - SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); - SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setUserMetadata, (p, c) -> p.map(), new ParseField(USER_METADATA)); - SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); - SNAPSHOT_INFO_PARSER.declareObjectArray( - SnapshotInfoBuilder::setShardFailures, - SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, - new ParseField(FAILURES) - ); - - SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL)); - SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new 
ParseField(SUCCESSFUL)); - } - private final Snapshot snapshot; @Nullable diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java index cb3c9a3557d61..87cb67a53fc37 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotInfoTestUtils; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -30,7 +31,7 @@ public class CreateSnapshotResponseTests extends AbstractXContentTestCase CREATE_SNAPSHOT_RESPONSE_PARSER = new ConstructingObjectParser<>( + CreateSnapshotResponse.class.getName(), + true, + args -> new CreateSnapshotResponse(((SnapshotInfoBuilder) args[0]).build()) + ); + + static final ObjectParser SNAPSHOT_INFO_PARSER = new ObjectParser<>( + SnapshotInfoBuilder.class.getName(), + true, + SnapshotInfoBuilder::new + ); + + static final ConstructingObjectParser SHARD_STATS_PARSER = new ConstructingObjectParser<>( + ShardStatsBuilder.class.getName(), + true, + args -> new ShardStatsBuilder((int) Objects.requireNonNullElse(args[0], 0), (int) Objects.requireNonNullElse(args[1], 0)) + ); + + static { + SHARD_STATS_PARSER.declareInt(optionalConstructorArg(), new ParseField(TOTAL)); + SHARD_STATS_PARSER.declareInt(optionalConstructorArg(), new ParseField(SUCCESSFUL)); + + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); + 
SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setRepository, new ParseField(REPOSITORY)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); + SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); + SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setDataStreams, new ParseField(DATA_STREAMS)); + SNAPSHOT_INFO_PARSER.declareObjectArray( + SnapshotInfoBuilder::setFeatureStates, + SnapshotFeatureInfo.SNAPSHOT_FEATURE_INFO_PARSER, + new ParseField(FEATURE_STATES) + ); + SNAPSHOT_INFO_PARSER.declareObject( + SnapshotInfoBuilder::setIndexSnapshotDetails, + (p, c) -> p.map(HashMap::new, p2 -> SnapshotInfo.IndexSnapshotDetails.PARSER.parse(p2, c)), + new ParseField(INDEX_DETAILS) + ); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); + SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setUserMetadata, (p, c) -> p.map(), new ParseField(USER_METADATA)); + SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); + SNAPSHOT_INFO_PARSER.declareObjectArray( + SnapshotInfoBuilder::setShardFailures, + SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER, + new ParseField(FAILURES) + ); + + CREATE_SNAPSHOT_RESPONSE_PARSER.declareObject(optionalConstructorArg(), SNAPSHOT_INFO_PARSER, new ParseField("snapshot")); + } + + private record 
ShardStatsBuilder(int totalShards, int successfulShards) {} + + public static final class SnapshotInfoBuilder { + private String snapshotName = null; + private String snapshotUUID = null; + private String repository = UNKNOWN_REPO_NAME; + private String state = null; + private String reason = null; + private List indices = null; + private List dataStreams = null; + private List featureStates = null; + private Map indexSnapshotDetails = null; + private long startTime = 0L; + private long endTime = 0L; + private ShardStatsBuilder shardStatsBuilder = null; + private Boolean includeGlobalState = null; + private Map userMetadata = null; + private int version = -1; + private List shardFailures = null; + + private void setSnapshotName(String snapshotName) { + this.snapshotName = snapshotName; + } + + private void setSnapshotUUID(String snapshotUUID) { + this.snapshotUUID = snapshotUUID; + } + + private void setRepository(String repository) { + this.repository = repository; + } + + private void setState(String state) { + this.state = state; + } + + private void setReason(String reason) { + this.reason = reason; + } + + private void setIndices(List indices) { + this.indices = indices; + } + + private void setDataStreams(List dataStreams) { + this.dataStreams = dataStreams; + } + + private void setFeatureStates(List featureStates) { + this.featureStates = featureStates; + } + + private void setIndexSnapshotDetails(Map indexSnapshotDetails) { + this.indexSnapshotDetails = indexSnapshotDetails; + } + + private void setStartTime(long startTime) { + this.startTime = startTime; + } + + private void setEndTime(long endTime) { + this.endTime = endTime; + } + + private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { + this.shardStatsBuilder = shardStatsBuilder; + } + + private void setIncludeGlobalState(Boolean includeGlobalState) { + this.includeGlobalState = includeGlobalState; + } + + private void setUserMetadata(Map userMetadata) { + this.userMetadata = 
userMetadata; + } + + private void setVersion(int version) { + this.version = version; + } + + private void setShardFailures(List shardFailures) { + this.shardFailures = shardFailures; + } + + public SnapshotInfo build() { + final Snapshot snapshot = new Snapshot(repository, new SnapshotId(snapshotName, snapshotUUID)); + + if (indices == null) { + indices = Collections.emptyList(); + } + + if (dataStreams == null) { + dataStreams = Collections.emptyList(); + } + + if (featureStates == null) { + featureStates = Collections.emptyList(); + } + + if (indexSnapshotDetails == null) { + indexSnapshotDetails = Collections.emptyMap(); + } + + SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); + IndexVersion version = this.version == -1 ? IndexVersion.current() : IndexVersion.fromId(this.version); + + int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.totalShards(); + int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.successfulShards(); + + if (shardFailures == null) { + shardFailures = new ArrayList<>(); + } + + return new SnapshotInfo( + snapshot, + indices, + dataStreams, + featureStates, + reason, + version, + startTime, + endTime, + totalShards, + successfulShards, + shardFailures, + includeGlobalState, + userMetadata, + snapshotState, + indexSnapshotDetails + ); + } + } + + public static CreateSnapshotResponse createSnapshotResponseFromXContent(XContentParser parser) { + return CREATE_SNAPSHOT_RESPONSE_PARSER.apply(parser, null); + } + + public static SnapshotInfo snapshotInfoFromXContent(XContentParser parser) { + return SNAPSHOT_INFO_PARSER.apply(parser, null).build(); + } +} diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java index 729cb8ef47292..f54cd4d4977d7 100644 --- 
a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotLifecycleTaskTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotInfoUtils; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -194,7 +195,9 @@ public void testCreateSnapshotOnTrigger() { assertThat(req.includeGlobalState(), equalTo(globalState)); try { - return CreateSnapshotResponse.fromXContent(createParser(JsonXContent.jsonXContent, createSnapResponse)); + return SnapshotInfoUtils.createSnapshotResponseFromXContent( + createParser(JsonXContent.jsonXContent, createSnapResponse) + ); } catch (IOException e) { fail("failed to parse snapshot response"); return null; From faf7b7fd382dbe207a69752436971ca59f32ea5d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 22 Mar 2024 14:50:01 +0100 Subject: [PATCH 134/214] Add more debugging to 25_id_generation/generates a consistent id test (#106667) Include the _ts_routing_hash field in the search response for more debugging. In order to add support for docvalue_fields feature for this field type, a custom DOC_VALUE_FORMAT is needed in order to decode to valid utf8 string. 
Relates #106550 --- .../test/tsdb/25_id_generation.yml | 3 +++ .../TimeSeriesRoutingHashFieldMapper.java | 25 +++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 04fa2faca209f..621906820e4ad 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -110,10 +110,13 @@ generates a consistent id: query: match_all: {} sort: ["@timestamp"] + _source: true + docvalue_fields: [_ts_routing_hash] - match: {hits.total.value: 9} - match: { hits.hits.0._id: cn4excfoxSs_KdA5AAABeRnRFAY } + - match: { hits.hits.0.fields._ts_routing_hash: [ cn4exQ ] } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:50:03.142Z } - match: { hits.hits.0._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java index 090fe7839b3e9..d5750600a25c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java @@ -10,6 +10,8 @@ import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersions; @@ -20,8 +22,10 @@ import org.elasticsearch.index.fielddata.plain.SortedOrdinalsIndexFieldData; import org.elasticsearch.index.query.SearchExecutionContext; import 
org.elasticsearch.script.field.DelegateDocValuesField; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import java.time.ZoneId; import java.util.Base64; import java.util.Collections; @@ -43,6 +47,22 @@ public class TimeSeriesRoutingHashFieldMapper extends MetadataFieldMapper { static final class TimeSeriesRoutingHashFieldType extends MappedFieldType { private static final TimeSeriesRoutingHashFieldType INSTANCE = new TimeSeriesRoutingHashFieldType(); + private static final DocValueFormat DOC_VALUE_FORMAT = new DocValueFormat() { + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) {} + + @Override + public Object format(BytesRef value) { + return Uid.decodeId(value.bytes, value.offset, value.length); + } + + }; private TimeSeriesRoutingHashFieldType() { super(NAME, false, false, true, TextSearchInfo.NONE, Collections.emptyMap()); @@ -75,6 +95,11 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext public Query termQuery(Object value, SearchExecutionContext context) { throw new IllegalArgumentException("[" + NAME + "] is not searchable"); } + + @Override + public DocValueFormat docValueFormat(String format, ZoneId timeZone) { + return DOC_VALUE_FORMAT; + } } private TimeSeriesRoutingHashFieldMapper() { From e92420dc865815995aee526b40dd60c4440a50a7 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Fri, 22 Mar 2024 15:28:30 +0100 Subject: [PATCH 135/214] [DOCS] Update cross cluster search compatability matrix (#106677) --- .../ccs-version-compat-matrix.asciidoc | 42 ++++++++++--------- 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc index bf51042d6adec..5af3c997251dd 100644 --- 
a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc +++ b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc @@ -1,22 +1,24 @@ -[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] +[cols="^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^,^"] |==== -| 16+^h| Remote cluster version +| 17+^h| Remote cluster version h| Local cluster version - | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 -| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} -| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} -|==== \ No newline at end of file + | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 |8.6 |8.7 |8.8 |8.9 |8.10 |8.11 |8.12 |8.13 +| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}|{yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} |{yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} +| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {yes-icon}| 
{yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} +| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon}| {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} +|==== + From cc96a8b7e9335682694eea91fb4bd4736a70ecc3 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Fri, 22 Mar 2024 16:31:51 +0200 Subject: [PATCH 136/214] Add DownsampleMetrics (#106632) A single instance gets created by the 
`Downsample` plugin and injected to related actions. --- .../xpack/downsample/Downsample.java | 6 ++ .../xpack/downsample/DownsampleMetrics.java | 74 +++++++++++++++++++ .../downsample/DownsampleShardIndexer.java | 9 ++- ...DownsampleShardPersistentTaskExecutor.java | 14 +++- .../TransportDownsampleIndexerAction.java | 7 +- .../DownsampleActionSingleNodeTests.java | 25 ++++++- 6 files changed, 130 insertions(+), 5 deletions(-) create mode 100644 x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java index 0d3a784e00e53..a6ba4346b1a25 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; +import java.util.Collection; import java.util.List; import java.util.function.Predicate; import java.util.function.Supplier; @@ -133,4 +134,9 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(PersistentTaskParams.class, DownsampleShardTaskParams.NAME, DownsampleShardTaskParams::new) ); } + + @Override + public Collection createComponents(PluginServices services) { + return List.of(new DownsampleMetrics(services.telemetryProvider().getMeterRegistry())); + } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java new file mode 100644 index 0000000000000..576f40a8190f3 --- /dev/null +++ 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.io.IOException; +import java.util.Map; + +/** + * Contains metrics related to downsampling actions. + * It gets initialized as a component by the {@link Downsample} plugin, can be injected to its actions. + * + * In tests, use TestTelemetryPlugin to inject a MeterRegistry for testing purposes + * and check that metrics get recorded as expected. + * + * To add a new metric, you need to: + * - Add a constant for its name, following the naming conventions for metrics. + * - Register it in method {@link #doStart}. + * - Add a function for recording its value. + * - If needed, inject {@link DownsampleMetrics} to the action containing the logic + * that records the metric value. For reference, see {@link TransportDownsampleIndexerAction}. + */ +public class DownsampleMetrics extends AbstractLifecycleComponent { + + public static final String LATENCY_SHARD = "es.tsdb.downsample.latency.shard.histogram"; + + private final MeterRegistry meterRegistry; + + public DownsampleMetrics(MeterRegistry meterRegistry) { + this.meterRegistry = meterRegistry; + } + + @Override + protected void doStart() { + // Register all metrics to track. 
+ meterRegistry.registerLongHistogram(LATENCY_SHARD, "Downsampling action latency per shard", "ms"); + } + + @Override + protected void doStop() {} + + @Override + protected void doClose() throws IOException {} + + enum ShardActionStatus { + + SUCCESS("success"), + MISSING_DOCS("missing_docs"), + FAILED("failed"); + + public static final String NAME = "status"; + + private final String message; + + ShardActionStatus(String message) { + this.message = message; + } + + String getMessage() { + return message; + } + } + + void recordLatencyShard(long durationInMilliSeconds, ShardActionStatus status) { + meterRegistry.getLongHistogram(LATENCY_SHARD).record(durationInMilliSeconds, Map.of(ShardActionStatus.NAME, status.getMessage())); + } +} diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 844c644ee9ea6..773dfbe897b50 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -85,6 +85,7 @@ class DownsampleShardIndexer { public static final ByteSizeValue DOWNSAMPLE_MAX_BYTES_IN_FLIGHT = new ByteSizeValue(50, ByteSizeUnit.MB); private final IndexShard indexShard; private final Client client; + private final DownsampleMetrics downsampleMetrics; private final String downsampleIndex; private final Engine.Searcher searcher; private final SearchExecutionContext searchExecutionContext; @@ -103,6 +104,7 @@ class DownsampleShardIndexer { final DownsampleShardTask task, final Client client, final IndexService indexService, + final DownsampleMetrics downsampleMetrics, final ShardId shardId, final String downsampleIndex, final DownsampleConfig config, @@ -113,6 +115,7 @@ class DownsampleShardIndexer { ) { this.task = task; this.client = client; + 
this.downsampleMetrics = downsampleMetrics; this.indexShard = indexService.getShard(shardId.id()); this.downsampleIndex = downsampleIndex; this.searcher = indexShard.acquireSearcher("downsampling"); @@ -164,6 +167,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept timeSeriesSearcher.search(initialStateQuery, bucketCollector); } + TimeValue duration = TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime); logger.info( "Shard [{}] successfully sent [{}], received source doc [{}], indexed downsampled doc [{}], failed [{}], took [{}]", indexShard.shardId(), @@ -171,7 +175,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept task.getNumSent(), task.getNumIndexed(), task.getNumFailed(), - TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime) + duration ); if (task.getNumIndexed() != task.getNumSent()) { @@ -187,6 +191,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumSent() + "]"; logger.info(error); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.MISSING_DOCS); throw new DownsampleShardIndexerException(error, false); } @@ -199,6 +204,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumFailed() + "]"; logger.info(error); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.FAILED); throw new DownsampleShardIndexerException(error, false); } @@ -208,6 +214,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept ActionListener.noop() ); logger.info("Downsampling task [" + task.getPersistentTaskId() + " on shard " + indexShard.shardId() + " completed"); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.SUCCESS); return new DownsampleIndexerAction.ShardDownsampleResponse(indexShard.shardId(), 
task.getNumIndexed()); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index b4116d42d25ca..5e6f8b6b5b18e 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -188,6 +188,7 @@ private static IndexShardRoutingTable findShardRoutingTable(ShardId shardId, Clu static void realNodeOperation( Client client, IndicesService indicesService, + DownsampleMetrics downsampleMetrics, DownsampleShardTask task, DownsampleShardTaskParams params, BytesRef lastDownsampledTsid @@ -209,6 +210,7 @@ protected void doRun() throws Exception { task, client, indicesService.indexServiceSafe(params.shardId().getIndex()), + downsampleMetrics, params.shardId(), params.downsampleIndex(), params.downsampleConfig(), @@ -303,17 +305,25 @@ public static class TA extends TransportAction { private final Client client; private final IndicesService indicesService; + private final DownsampleMetrics downsampleMetrics; @Inject - public TA(TransportService transportService, ActionFilters actionFilters, Client client, IndicesService indicesService) { + public TA( + TransportService transportService, + ActionFilters actionFilters, + Client client, + IndicesService indicesService, + DownsampleMetrics downsampleMetrics + ) { super(NAME, actionFilters, transportService.getTaskManager()); this.client = client; this.indicesService = indicesService; + this.downsampleMetrics = downsampleMetrics; } @Override protected void doExecute(Task t, Request request, ActionListener listener) { - realNodeOperation(client, indicesService, request.task, request.params, request.lastDownsampleTsid); + realNodeOperation(client, 
indicesService, downsampleMetrics, request.task, request.params, request.lastDownsampleTsid); listener.onResponse(ActionResponse.Empty.INSTANCE); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java index 24d1df638f80b..f7cfe2d859583 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java @@ -52,6 +52,8 @@ public class TransportDownsampleIndexerAction extends TransportBroadcastAction< private final ClusterService clusterService; private final IndicesService indicesService; + private final DownsampleMetrics downsampleMetrics; + @Inject public TransportDownsampleIndexerAction( Client client, @@ -59,7 +61,8 @@ public TransportDownsampleIndexerAction( TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver + IndexNameExpressionResolver indexNameExpressionResolver, + DownsampleMetrics downsampleMetrics ) { super( DownsampleIndexerAction.NAME, @@ -74,6 +77,7 @@ public TransportDownsampleIndexerAction( this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN); this.clusterService = clusterService; this.indicesService = indicesService; + this.downsampleMetrics = downsampleMetrics; } @Override @@ -139,6 +143,7 @@ protected DownsampleIndexerAction.ShardDownsampleResponse shardOperation( (DownsampleShardTask) task, client, indexService, + downsampleMetrics, request.shardId(), request.getDownsampleIndex(), request.getRollupConfig(), diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java 
b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index a7b36bbd7dc9b..4c5fdc23e04f9 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -57,6 +57,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchResponseUtils; @@ -80,6 +81,8 @@ import org.elasticsearch.tasks.TaskCancelHelper; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -93,6 +96,7 @@ import org.elasticsearch.xpack.core.ilm.RolloverAction; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.ilm.IndexLifecycle; +import org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; @@ -162,7 +166,8 @@ protected Collection> getPlugins() { Downsample.class, AggregateMetricMapperPlugin.class, DataStreamsPlugin.class, - IndexLifecycle.class + IndexLifecycle.class, + TestTelemetryPlugin.class ); } @@ -623,6 +628,7 @@ public void testCancelDownsampleIndexer() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -672,6 +678,7 @@ public void testDownsampleBulkFailed() throws IOException { task, client(), indexService, + 
getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -739,6 +746,7 @@ public void testTooManyBytesInFlight() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -791,6 +799,7 @@ public void testDownsampleStats() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -810,6 +819,18 @@ public void testDownsampleStats() throws IOException { assertDownsampleIndexer(indexService, shardNum, task, executeResponse, task.getTotalShardDocCount()); } + + // Check that metrics get collected as expected. + final TestTelemetryPlugin plugin = getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + List measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_SHARD); + assertFalse(measurements.isEmpty()); + for (Measurement measurement : measurements) { + assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); + } } public void testResumeDownsample() throws IOException { @@ -848,6 +869,7 @@ public void testResumeDownsample() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, @@ -923,6 +945,7 @@ public void testResumeDownsamplePartial() throws IOException { task, client(), indexService, + getInstanceFromNode(DownsampleMetrics.class), shard.shardId(), downsampleIndex, config, From bdce52ebf768658f355168c38fdf9e342389c94b Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 22 Mar 2024 15:33:32 +0100 Subject: [PATCH 137/214] Assert that FsBlobContainer.readBlob does 
not start reading after file length (#106668) This change adds an assertion in the FsBlobContainer.readBlob to ensure we are not reading after the last byte of the file. While this is legal and documented in the SeekableByteChannel.position() API, having this assertion in place would have caught on CI some regression introduced recently and only caught on deployments where S3 rejects reads starting after blob length. --- .../org/elasticsearch/common/blobstore/fs/FsBlobContainer.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index e40ca70460b13..749773cd91eb8 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -197,6 +197,7 @@ public InputStream readBlob(OperationPurpose purpose, String blobName, long posi assert BlobContainer.assertPurposeConsistency(purpose, blobName); final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { + assert position < channel.size() : "reading from " + position + " exceeds file length " + channel.size(); channel.position(position); } assert channel.position() == position; From 0eca8dda92d0b680d9549b2f387677f5f09adf01 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Fri, 22 Mar 2024 15:53:39 +0100 Subject: [PATCH 138/214] [Profiling] Switch to OTEL cloud.provider, cloud.region, host.type (#106656) * [Profiling] Switch to OTEL cloud.provider, cloud.region, host.type * Remove wrong spaces from auto-format --- .../component-template/profiling-hosts.json | 25 ++++++-- .../resources/data/profiling-hosts.ndjson | 4 +- .../xpack/profiling/InstanceType.java | 64 ++++++++++++------- .../ProfilingIndexTemplateRegistry.java | 5 +- .../xpack/profiling/HostMetadataTests.java | 60 
+++++++++++++++-- .../rest-api-spec/test/profiling/10_basic.yml | 2 +- 6 files changed, 123 insertions(+), 37 deletions(-) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json index f633a8f0cbdb5..353411ed80b2e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/component-template/profiling-hosts.json @@ -33,11 +33,28 @@ "type": "date", "format": "epoch_second" }, - "host.id": { - "type": "keyword" + "host": { + "properties": { + "arch": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "type": { + "type": "keyword" + } + } }, - "host.arch": { - "type": "keyword" + "cloud": { + "properties": { + "provider": { + "type": "keyword" + }, + "region": { + "type": "keyword" + } + } }, "profiling": { "properties": { diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson index e164f49c4f685..e12a670a79d18 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson @@ -1,4 +1,4 @@ {"create": {"_index": "profiling-hosts","_id":"eLH27YsBj2lLi3tJYlvr"}} 
-{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"ec2.instance_type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"ec2.placement.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Linux version 
9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} +{"profiling.project.id":100,"host.id":"8457605156473051743","@timestamp":1700504426,"ecs.version":"1.12.0","profiling.agent.build_timestamp":1688111067,"profiling.instance.private_ipv4s":["192.168.1.2"],"ec2.instance_life_cycle":"on-demand","profiling.agent.config.map_scale_factor":0,"host.type":"i3.2xlarge","profiling.host.ip":"192.168.1.2","profiling.agent.config.bpf_log_level":0,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.file":"/etc/prodfiler/prodfiler.conf","ec2.local_ipv4":"192.168.1.2","profiling.agent.config.no_kernel_version_check":false,"host.arch":"amd64","profiling.host.tags":["cloud_provider:aws","cloud_environment:qa","cloud_region:eu-west-1"],"profiling.agent.config.probabilistic_threshold":100,"profiling.agent.config.disable_tls":false,"profiling.agent.config.tracers":"all","profiling.agent.start_time":1700090045589,"profiling.agent.config.max_elements_per_interval":800,"cloud.provider":"aws","cloud.region":"eu-west-1","profiling.agent.config.present_cpu_cores":8,"profiling.host.kernel_version":"9.9.9-0-aws","profiling.agent.config.bpf_log_size":65536,"profiling.agent.config.known_traces_entries":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.verbose":false,"profiling.agent.config.probabilistic_interval":"1m0s","ec2.placement.availability_zone_id":"euw1-az1","ec2.security_groups":"","ec2.local_hostname":"ip-192-168-1-2.eu-west-1.compute.internal","ec2.placement.availability_zone":"eu-west-1c","profiling.agent.config.upload_symbols":false,"profiling.host.sysctl.kernel.bpf_stats_enabled":0,"profiling.host.name":"ip-192-168-1-2","ec2.mac":"00:11:22:33:44:55","profiling.host.kernel_proc_version":"Lin
ux version 9.9.9-0-aws","profiling.agent.config.cache_directory":"/var/cache/optimyze/","profiling.agent.version":"v8.12.0","ec2.hostname":"ip-192-168-1-2.eu-west-1.compute.internal","profiling.agent.config.elastic_mode":false,"ec2.ami_id":"ami-aaaaaaaaaaa","ec2.instance_id":"i-0b999999999999999"} {"create": {"_index": "profiling-hosts", "_id": "u_fHlYwBkmZvQ6tVo1Lr"}} -{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"azure.compute.vmsize":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interval":"1m0s","azure.compute.location":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_
disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} +{"profiling.project.id":100,"host.id":"7416508186220657211","@timestamp":1703319912,"ecs.version":"1.12.0","profiling.agent.version":"8.11.0","profiling.agent.config.map_scale_factor":0,"profiling.agent.config.probabilistic_threshold":100,"profiling.host.name":"ip-192-186-1-3","profiling.agent.config.no_kernel_version_check":false,"profiling.host.sysctl.net.core.bpf_jit_enable":1,"profiling.agent.config.elastic_mode":false,"host.type":"Standard_D4s_v3","azure.compute.environment":"AzurePublicCloud","profiling.agent.config.bpf_log_level":0,"profiling.agent.config.known_traces_entries":65536,"profiling.agent.config.ca_address":"example.com:443","profiling.agent.config.tags":"cloud_provider:azure;cloud_environment:qa;cloud_region:eastus2","profiling.host.tags":["cloud_provider:azure","cloud_environment:qa","cloud_region:eastus2"],"profiling.host.kernel_version":"9.9.9-0-azure","profiling.agent.revision":"head-52cc2030","azure.compute.subscriptionid":"1-2-3-4-5","profiling.host.sysctl.kernel.bpf_stats_enabled":0,"host.arch":"amd64","azure.compute.zone":"3","profiling.agent.config.cache_directory":"/var/cache/Elastic/universal-profiling","azure.compute.name":"example-qa-eastus2-001-v1-zone3_6","profiling.agent.config.probabilistic_interva
l":"1m0s","cloud.provider":"azure","cloud.region":"eastus2","azure.compute.version":"1234.20230510.233254","profiling.instance.private_ipv4s":["192.168.1.3"],"profiling.agent.build_timestamp":1699000836,"profiling.agent.config.file":"/etc/Elastic/universal-profiling/pf-host-agent.conf","profiling.agent.config.bpf_log_size":65536,"profiling.host.sysctl.kernel.unprivileged_bpf_disabled":1,"profiling.agent.config.tracers":"all","profiling.agent.config.present_cpu_cores":4,"profiling.agent.start_time":1702306987358,"profiling.agent.config.disable_tls":false,"azure.compute.ostype":"Linux","profiling.host.ip":"192.168.1.3","profiling.agent.config.max_elements_per_interval":400,"profiling.agent.config.upload_symbols":false,"azure.compute.tags":"bootstrap-version:v1;ece-id:001;environment:qa;identifier:v1;initial-config:;managed-by:terraform;monitored-by:core-infrastructure;owner:core-infrastructure;region_type:ess;role:blueprint;secondary_role:;vars-identifier:eastus2-001-v1","profiling.host.kernel_proc_version":"Linux version 9.9.9-0-azure","profiling.agent.config.verbose":false,"azure.compute.vmid":"1-2-3-4-5"} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java index ee649e381c85d..3aa0a79df13bc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java @@ -35,6 +35,45 @@ final class InstanceType implements ToXContentObject { * @return the {@link InstanceType} */ public static InstanceType fromHostSource(Map source) { + String provider = (String) source.get("cloud.provider"); + if (provider != null) { + String region = (String) source.get("cloud.region"); + String instanceType = (String) source.get("host.type"); + return new InstanceType(provider, region, instanceType); + } + + // Check and 
handle pre-8.14.0 host sources for backwards-compatibility. + InstanceType instanceType = fromObsoleteHostSource(source); + if (instanceType != null) { + return instanceType; + } + + // Support for configured tags (ECS). + // Example of tags: + // "profiling.host.tags": [ + // "cloud_provider:aws", + // "cloud_environment:qa", + // "cloud_region:eu-west-1", + // ], + String region = null; + List tags = listOf(source.get("profiling.host.tags")); + for (String tag : tags) { + String[] kv = tag.toLowerCase(Locale.ROOT).split(":", 2); + if (kv.length != 2) { + continue; + } + if ("cloud_provider".equals(kv[0])) { + provider = kv[1]; + } + if ("cloud_region".equals(kv[0])) { + region = kv[1]; + } + } + + return new InstanceType(provider, region, null); + } + + private static InstanceType fromObsoleteHostSource(Map source) { // Check and handle AWS. String region = (String) source.get("ec2.placement.region"); if (region != null) { @@ -67,30 +106,7 @@ public static InstanceType fromHostSource(Map source) { return new InstanceType("azure", region, instanceType); } - // Support for configured tags (ECS). 
- // Example of tags: - // "profiling.host.tags": [ - // "cloud_provider:aws", - // "cloud_environment:qa", - // "cloud_region:eu-west-1", - // ], - String provider = null; - region = null; - List tags = listOf(source.get("profiling.host.tags")); - for (String tag : tags) { - String[] kv = tag.toLowerCase(Locale.ROOT).split(":", 2); - if (kv.length != 2) { - continue; - } - if ("cloud_provider".equals(kv[0])) { - provider = kv[1]; - } - if ("cloud_region".equals(kv[0])) { - region = kv[1]; - } - } - - return new InstanceType(provider, region, null); + return null; } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index 738c06fa310a9..e1698e71afab2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -47,13 +47,14 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { // version 4: Added 'service.name' keyword mapping to profiling-events // version 5: Add optional component template '@custom' to all index templates that reference component templates // version 6: Added 'host.arch' keyword mapping to profiling-hosts - public static final int INDEX_TEMPLATE_VERSION = 6; + // version 7: Added 'host.type', 'cloud.provider', 'cloud.region' keyword mappings to profiling-hosts + public static final int INDEX_TEMPLATE_VERSION = 7; // history for individual indices / index templates. 
Only bump these for breaking changes that require to create a new index public static final int PROFILING_EVENTS_VERSION = 2; public static final int PROFILING_EXECUTABLES_VERSION = 1; public static final int PROFILING_METRICS_VERSION = 1; - public static final int PROFILING_HOSTS_VERSION = 1; + public static final int PROFILING_HOSTS_VERSION = 2; public static final int PROFILING_STACKFRAMES_VERSION = 1; public static final int PROFILING_STACKTRACES_VERSION = 1; public static final int PROFILING_SYMBOLS_VERSION = 1; diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java index de32754ed69ff..5c24e295909bc 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java @@ -14,6 +14,32 @@ public class HostMetadataTests extends ESTestCase { public void testCreateFromSourceAWS() { + final String hostID = "1440256254710195396"; + final String arch = "amd64"; + final String provider = "aws"; + final String region = "eu-west-1"; + final String instanceType = "md5x.large"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource ( + Map.of ( + "host.id", hostID, + "host.arch", arch, + "host.type", instanceType, + "cloud.provider", provider, + "cloud.region", region + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals(instanceType, host.instanceType.name); + } + + public void testCreateFromSourceAWSCompat() { final String hostID = "1440256254710195396"; final String arch = "x86_64"; final String provider = "aws"; @@ -21,8 +47,8 @@ public void testCreateFromSourceAWS() { final String instanceType = 
"md5x.large"; // tag::noformat - HostMetadata host = HostMetadata.fromSource( - Map.of( + HostMetadata host = HostMetadata.fromSource ( + Map.of ( "host.id", hostID, "host.arch", arch, "ec2.instance_type", instanceType, @@ -39,6 +65,32 @@ public void testCreateFromSourceAWS() { } public void testCreateFromSourceGCP() { + final String hostID = "1440256254710195396"; + final String arch = "amd64"; + final String provider = "gcp"; + final String[] regions = { "", "", "europe-west1", "europewest", "europe-west1" }; + + for (String region : regions) { + // tag::noformat + HostMetadata host = HostMetadata.fromSource ( + Map.of ( + "host.id", hostID, + "host.arch", arch, + "cloud.provider", provider, + "cloud.region", region + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(arch, host.hostArchitecture); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals("", host.instanceType.name); + } + } + + public void testCreateFromSourceGCPCompat() { final String hostID = "1440256254710195396"; final String arch = "x86_64"; final String provider = "gcp"; @@ -142,8 +194,8 @@ public void testCreateFromSourceECS() { Map.of( "host.id", hostID, "host.arch", arch, - "profiling.host.tags", Arrays.asList( - "cloud_provider:"+provider, "cloud_environment:qa", "cloud_region:"+region) + "profiling.host.tags", Arrays.asList ( + "cloud_provider:" + provider, "cloud_environment:qa", "cloud_region:" + region) ) ); // end::noformat diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 4697141bfc599..cc282d26ae418 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -116,7 +116,7 @@ setup: - {"create": {"_index": "profiling-executables", 
"_id": "lHp5_WAgpLy2alrUVab6HA"}} - {"@timestamp": "1698624000", "Executable": {"build": {"id": "c5f89ea1c68710d2a493bb604c343a92c4f8ddeb"}, "file": {"name": "vmlinux"}}, "Symbolization": {"next_time": "4852491791"}, "ecs": {"version": "1.12.0"}} - {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} - - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, 
"profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } + - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "host.type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "host.arch": "amd64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "cloud.provider": "aws", "cloud.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": 
"eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } - {"index": {"_index": "test-events"}} - {"@timestamp": "1700504427", "events": ["S07KmaoGhvNte78xwwRbZQ"]} --- From fb17da0647b9dffe00449587dc80171eb14c9a35 Mon Sep 17 00:00:00 2001 From: yashdamani Date: Fri, 22 Mar 2024 20:36:59 +0530 Subject: [PATCH 139/214] Don't trim highlight snippet if number_of_fragments is 0 Don't trim highlight snippet if number_of_fragments is 0 Closes #101803 --- docs/changelog/106306.yaml | 6 ++ .../test/search/510_fragment_trimming_fix.yml | 62 +++++++++++++++++++ .../uhighlight/CustomPassageFormatter.java | 12 +++- .../highlight/DefaultHighlighter.java | 7 ++- .../CustomPassageFormatterTests.java | 4 +- .../CustomUnifiedHighlighterTests.java | 2 +- 6 files changed, 86 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/106306.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml diff --git a/docs/changelog/106306.yaml b/docs/changelog/106306.yaml new file mode 100644 index 0000000000000..571fe73c31a3e --- /dev/null +++ b/docs/changelog/106306.yaml @@ -0,0 +1,6 @@ +pr: 99961 +summary: "added fix for inconsistent text trimming in Unified Highlighter" +area: Highlighting +type: bug +issues: + - 101803 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml new file mode 100644 
index 0000000000000..355ffeebfb1d3 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_fragment_trimming_fix.yml @@ -0,0 +1,62 @@ +setup: + - skip: + version: ' - 8.13.99' + reason: 'no trimming highlight snippets when number_of_fragments is 0 was introduced in 8.14' + - do: + indices.create: + index: test_trim + body: + mappings: + properties: + text: + type: text + analyzer: whitespace + + - do: + bulk: + refresh: true + body: + - index: + _index: test_trim + _id: 1 + - text: " a b c d " + +--- +"Test unified highlighter with custom passage formatter and number_of_fragments > 0": + - do: + search: + index: test_trim + body: + query: + match: + text: "c" + highlight: + type: unified + number_of_fragments: 1 + fields: + text: + pre_tags: ["("] + post_tags: [")"] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.highlight.text: ["a b (c) d"] } + +--- +"Test unified highlighter with custom passage formatter when number_of_fragments = 0": + - do: + search: + index: test_trim + body: + query: + match: + text: "c" + highlight: + type: unified + number_of_fragments: 0 + fields: + text: + pre_tags: ["("] + post_tags: [")"] + + - match: { hits.total.value: 1 } + - match: { hits.hits.0.highlight.text: [" a b (c) d "] } diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java index 6ae2f53a94ad8..41a68494e7cbb 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatter.java @@ -23,11 +23,13 @@ public class CustomPassageFormatter extends PassageFormatter { private final String preTag; private final String postTag; private final Encoder encoder; + private final int numberOfFragments; - public CustomPassageFormatter(String preTag, String postTag, 
Encoder encoder) { + public CustomPassageFormatter(String preTag, String postTag, Encoder encoder, int numberOfFragments) { this.preTag = preTag; this.postTag = postTag; this.encoder = encoder; + this.numberOfFragments = numberOfFragments; } @Override @@ -66,8 +68,12 @@ public Snippet[] format(Passage[] passages, String content) { } else if (sb.charAt(sb.length() - 1) == HighlightUtils.NULL_SEPARATOR) { sb.deleteCharAt(sb.length() - 1); } - // and we trim the snippets too - snippets[j] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); + // and we trim the snippets too, if the number of fragments > 0 + if (numberOfFragments == 0) { + snippets[j] = new Snippet(sb.toString(), passage.getScore(), passage.getNumMatches() > 0); + } else { + snippets[j] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); + } } return snippets; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java index e77436ba61423..da1be48e6b2c0 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/DefaultHighlighter.java @@ -161,7 +161,12 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) { } protected PassageFormatter getPassageFormatter(SearchHighlightContext.Field field, Encoder encoder) { - return new CustomPassageFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder); + return new CustomPassageFormatter( + field.fieldOptions().preTags()[0], + field.fieldOptions().postTags()[0], + encoder, + field.fieldOptions().numberOfFragments() + ); } protected Analyzer wrapAnalyzer(Analyzer analyzer, Integer maxAnalyzedOffset) { diff --git 
a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java index c1ecaf12828d3..10db924f25f4b 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomPassageFormatterTests.java @@ -21,7 +21,7 @@ public class CustomPassageFormatterTests extends ESTestCase { public void testSimpleFormat() { String content = "This is a really cool highlighter. Unified highlighter gives nice snippets back. No matches here."; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder()); + CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder(), 3); Passage[] passages = new Passage[3]; String match = "highlighter"; @@ -62,7 +62,7 @@ public void testSimpleFormat() { public void testHtmlEncodeFormat() { String content = "This is a really cool highlighter. 
Unified highlighter gives nice snippets back."; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new SimpleHTMLEncoder()); + CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new SimpleHTMLEncoder(), 3); Passage[] passages = new Passage[2]; String match = "highlighter"; diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index bf249ba4409ab..8412cc241f51a 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -145,7 +145,7 @@ private void assertHighlightOneDoc( UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); builder.withFieldMatcher(name -> "text".equals(name)); - builder.withFormatter(new CustomPassageFormatter("", "", new DefaultEncoder())); + builder.withFormatter(new CustomPassageFormatter("", "", new DefaultEncoder(), 3)); CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( builder, offsetSource, From cbc418235abc81c44b8809b971584f53a823629b Mon Sep 17 00:00:00 2001 From: Fernando Briano Date: Fri, 22 Mar 2024 15:25:38 +0000 Subject: [PATCH 140/214] Updates REST API test issue69009.yml (#106663) --- .../rest-api-spec/test/search.highlight/issue69009.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml index 8b2f2f90dd0ee..cd3751dbb9653 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/issue69009.yml @@ -29,7 +29,7 @@ setup: search: index: test body: { - "query": { "match": { "fox" } }, + "query": { "match": { "text": "fox" } }, "highlight": { "type": "unified", "fields": { "*": { } }, @@ -45,7 +45,7 @@ setup: search: index: test body: { - "query": { "match": { "fox" } }, + "query": { "match": { "text": "fox" } }, "highlight": { "type": "plain", "fields": { "*": { } }, @@ -61,7 +61,7 @@ setup: search: index: test body: { - "query": { "match": { "fox" } }, + "query": { "match": { "text": "fox" } }, "highlight": { "type": "fvh", "fields": { "*": { } }, From ecb422380fddbb3ea024c64405d8bc529c982d4f Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Fri, 22 Mar 2024 12:48:00 -0400 Subject: [PATCH 141/214] [ESQL] Migrate PropagateEquals optimization (#106627) Relates to #105217 This copies the PropagateEquals logical optimization into ESQL, following the pattern established in #106499. I've copied the optimization rule into the ESQL version of OptimizerRules, and the tests into OpitmizerRulesTests, and changed the imports &c to point to the appropriate ESQL classes instead of their QL counterparts. I expect to have several more PRs following this pattern, for the remaining logical optimizations that touch the binary comparison logic. I'm intending to make separate PRs for each, in the interest of making them easier to review. 
--- .../esql/optimizer/LogicalPlanOptimizer.java | 3 +- .../xpack/esql/optimizer/OptimizerRules.java | 342 ++++++++++++++++ .../esql/optimizer/OptimizerRulesTests.java | 383 +++++++++++++++++- 3 files changed, 715 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 2879173a6f5ad..af8ad7a1fc435 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -82,7 +82,6 @@ import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; -import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PropagateEquals; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN; @@ -126,7 +125,7 @@ protected static Batch operators() { new BooleanSimplification(), new LiteralsOnTheRight(), // needs to occur before BinaryComparison combinations (see class) - new PropagateEquals(), + new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.PropagateEquals(), new PropagateNullable(), new BooleanFunctionEqualsElimination(), new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.CombineDisjunctionsToIn(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index e375f11ab3ae7..3ae662580a200 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -8,7 +8,13 @@ package org.elasticsearch.xpack.esql.optimizer; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; @@ -34,14 +40,23 @@ import org.elasticsearch.xpack.ql.expression.AttributeSet; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.Predicates; +import org.elasticsearch.xpack.ql.expression.predicate.Range; +import org.elasticsearch.xpack.ql.expression.predicate.logical.And; +import org.elasticsearch.xpack.ql.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.plan.QueryPlan; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import 
org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.CollectionUtils; import java.time.ZoneId; import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -50,6 +65,7 @@ import java.util.Set; import static org.elasticsearch.xpack.ql.common.Failure.fail; +import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.combineOr; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitOr; @@ -240,4 +256,330 @@ protected Expression rule(Or or) { return e; } } + + /** + * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. + * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. + * When encountering a containing {@link Range}, {@link BinaryComparison} or {@link NotEquals}, these get eliminated by the equality. + * + * Since this rule can eliminate Ranges and BinaryComparisons, it should be applied before + * {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineBinaryComparisons}. + * + * This rule doesn't perform any promotion of {@link BinaryComparison}s, that is handled by + * {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.CombineBinaryComparisons} on purpose as the resulting Range might be + * foldable (which is picked by the folding rule on the next run). 
+ */ + public static final class PropagateEquals extends org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule< + BinaryLogic> { + + PropagateEquals() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.DOWN); + } + + public Expression rule(BinaryLogic e) { + if (e instanceof And) { + return propagate((And) e); + } else if (e instanceof Or) { + return propagate((Or) e); + } + return e; + } + + // combine conjunction + private static Expression propagate(And and) { + List ranges = new ArrayList<>(); + // Only equalities, not-equalities and inequalities with a foldable .right are extracted separately; + // the others go into the general 'exps'. + // TODO: In 105217, this should change to EsqlBinaryComparison, but it doesn't exist in this branch yet + List equals = new ArrayList<>(); + List notEquals = new ArrayList<>(); + List inequalities = new ArrayList<>(); + List exps = new ArrayList<>(); + + boolean changed = false; + + for (Expression ex : Predicates.splitAnd(and)) { + if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof Equals || ex instanceof NullEquals) { + BinaryComparison otherEq = (BinaryComparison) ex; + // equals on different values evaluate to FALSE + // ignore date/time fields as equality comparison might actually be a range check + if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) { + for (BinaryComparison eq : equals) { + if (otherEq.left().semanticEquals(eq.left())) { + Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold()); + if (comp != null) { + // var cannot be equal to two different values at the same time + if (comp != 0) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + } + } + equals.add(otherEq); + } else { + exps.add(otherEq); + } + } else if (ex instanceof GreaterThan + || ex instanceof GreaterThanOrEqual + || ex instanceof LessThan + || ex instanceof 
LessThanOrEqual) { + BinaryComparison bc = (BinaryComparison) ex; + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals otherNotEq) { + if (otherNotEq.right().foldable()) { + notEquals.add(otherNotEq); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + // check + for (BinaryComparison eq : equals) { + Object eqValue = eq.right().fold(); + + for (Iterator iterator = ranges.iterator(); iterator.hasNext();) { + Range range = iterator.next(); + + if (range.value().semanticEquals(eq.left())) { + // if equals is outside the interval, evaluate the whole expression to FALSE + if (range.lower().foldable()) { + Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue); + if (compare != null && ( + // eq outside the lower boundary + compare > 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeLower() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + if (range.upper().foldable()) { + Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue); + if (compare != null && ( + // eq outside the upper boundary + compare < 0 || + // eq matches the boundary but should not be included + (compare == 0 && range.includeUpper() == false))) { + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + + // it's in the range and thus, remove it + iterator.remove(); + changed = true; + } + } + + // evaluate all NotEquals against the Equal + for (Iterator iter = notEquals.iterator(); iter.hasNext();) { + NotEquals neq = iter.next(); + if (eq.left().semanticEquals(neq.left())) { + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1 + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } else { // clashing and redundant: a = 1 AND a != 2 + iter.remove(); 
+ changed = true; + } + } + } + } + + // evaluate all inequalities against the Equal + for (Iterator iter = inequalities.iterator(); iter.hasNext();) { + BinaryComparison bc = iter.next(); + if (eq.left().semanticEquals(bc.left())) { + Integer compare = BinaryComparison.compare(eqValue, bc.right().fold()); + if (compare != null) { + if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a />= ? + if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2 + compare < 0) { // a = 2 AND a >/>= 3 + return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN); + } + } + + iter.remove(); + changed = true; + } + } + } + } + + return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and; + } + + // combine disjunction: + // a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1 + // a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop + // a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop + // a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5 + private static Expression propagate(Or or) { + List exps = new ArrayList<>(); + List equals = new ArrayList<>(); // foldable right term Equals + List notEquals = new ArrayList<>(); // foldable right term NotEquals + List ranges = new ArrayList<>(); + List inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparision + + // split expressions by type + for (Expression ex : Predicates.splitOr(or)) { + if (ex instanceof Equals eq) { + if (eq.right().foldable()) { + equals.add(eq); + } else { + exps.add(ex); + } + } else if (ex instanceof NotEquals neq) { + if (neq.right().foldable()) { + notEquals.add(neq); + } else { + exps.add(ex); + } + } else if (ex instanceof Range) { + ranges.add((Range) ex); + } else if (ex instanceof BinaryComparison bc) { + if (bc.right().foldable()) { + inequalities.add(bc); + } else { + exps.add(ex); + } + } else { + exps.add(ex); + } + } + + boolean 
updated = false; // has the expression been modified? + + // evaluate the impact of each Equal over the different types of Expressions + for (Iterator iterEq = equals.iterator(); iterEq.hasNext();) { + Equals eq = iterEq.next(); + Object eqValue = eq.right().fold(); + boolean removeEquals = false; + + // Equals OR NotEquals + for (NotEquals neq : notEquals) { + if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ... + Integer comp = BinaryComparison.compare(eqValue, neq.right().fold()); + if (comp != null) { + if (comp == 0) { // a = 2 OR a != 2 -> TRUE + return TRUE; + } else { // a = 2 OR a != 5 -> a != 5 + removeEquals = true; + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + } + + // Equals OR Range + for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop + Range range = ranges.get(i); + if (eq.left().semanticEquals(range.value())) { + Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null; + Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null; + + if (lowerComp != null && lowerComp == 0) { + if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ? + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + true, + range.upper(), + range.includeUpper(), + range.zoneId() + ) + ); + } // else : a = 2 OR 2 <= a < ? -> 2 <= a < ? + removeEquals = true; // update range with lower equality instead or simply superfluous + break; + } else if (upperComp != null && upperComp == 0) { + if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2 + ranges.set( + i, + new Range( + range.source(), + range.value(), + range.lower(), + range.includeLower(), + range.upper(), + true, + range.zoneId() + ) + ); + } // else : a = 2 OR ? < a <= 2 -> ? 
< a <= 2 + removeEquals = true; // update range with upper equality instead + break; + } else if (lowerComp != null && upperComp != null) { + if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3 + removeEquals = true; // equality is superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + continue; + } + + // Equals OR Inequality + for (int i = 0; i < inequalities.size(); i++) { + BinaryComparison bc = inequalities.get(i); + if (eq.left().semanticEquals(bc.left())) { + Integer comp = BinaryComparison.compare(eqValue, bc.right().fold()); + if (comp != null) { + if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { + if (comp < 0) { // a = 1 OR a > 2 -> nop + continue; + } else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2 + inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (0 < comp || bc instanceof GreaterThanOrEqual) : + // a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2 + + removeEquals = true; // update range with equality instead or simply superfluous + break; + } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { + if (comp > 0) { // a = 2 OR a < 1 -> nop + continue; + } + if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2 + inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId())); + } // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2 + removeEquals = true; // update range with equality instead or simply superfluous + break; + } + } + } + } + if (removeEquals) { + iterEq.remove(); + updated = true; + } + } + + return updated ? 
Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or; + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index dd9704d57b12a..1aac8efbe6f65 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -9,11 +9,19 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NullEquals; +import org.elasticsearch.xpack.ql.TestUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.predicate.Predicates; +import org.elasticsearch.xpack.ql.expression.predicate.Range; import org.elasticsearch.xpack.ql.expression.predicate.logical.And; import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; import org.elasticsearch.xpack.ql.plan.logical.Filter; @@ -24,8 +32,12 @@ import java.util.List; import static java.util.Arrays.asList; -import static 
org.elasticsearch.xpack.ql.TestUtils.getFieldAttribute; +import static org.elasticsearch.xpack.ql.TestUtils.equalsOf; +import static org.elasticsearch.xpack.ql.TestUtils.nullEqualsOf; +import static org.elasticsearch.xpack.ql.TestUtils.rangeOf; import static org.elasticsearch.xpack.ql.TestUtils.relation; +import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; +import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; @@ -33,6 +45,8 @@ public class OptimizerRulesTests extends ESTestCase { private static final Literal ONE = new Literal(Source.EMPTY, 1, DataTypes.INTEGER); private static final Literal TWO = new Literal(Source.EMPTY, 2, DataTypes.INTEGER); private static final Literal THREE = new Literal(Source.EMPTY, 3, DataTypes.INTEGER); + private static final Literal FOUR = new Literal(Source.EMPTY, 4, DataTypes.INTEGER); + private static final Literal FIVE = new Literal(Source.EMPTY, 5, DataTypes.INTEGER); private static Equals equalsOf(Expression left, Expression right) { return new Equals(EMPTY, left, right, null); @@ -42,11 +56,35 @@ private static LessThan lessThanOf(Expression left, Expression right) { return new LessThan(EMPTY, left, right, null); } + public static GreaterThan greaterThanOf(Expression left, Expression right) { + return new GreaterThan(EMPTY, left, right, randomZone()); + } + + public static NotEquals notEqualsOf(Expression left, Expression right) { + return new NotEquals(EMPTY, left, right, randomZone()); + } + + public static NullEquals nullEqualsOf(Expression left, Expression right) { + return new NullEquals(EMPTY, left, right, randomZone()); + } + + public static LessThanOrEqual lessThanOrEqualOf(Expression left, Expression right) { + return new LessThanOrEqual(EMPTY, left, right, randomZone()); + } + + public static GreaterThanOrEqual greaterThanOrEqualOf(Expression left, Expression right) { + return new 
GreaterThanOrEqual(EMPTY, left, right, randomZone()); + } + + private static FieldAttribute getFieldAttribute() { + return TestUtils.getFieldAttribute("a"); + } + // // CombineDisjunction in Equals // public void testTwoEqualsWithOr() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, TWO)); Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); @@ -57,7 +95,7 @@ public void testTwoEqualsWithOr() { } public void testTwoEqualsWithSameValue() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), equalsOf(fa, ONE)); Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); @@ -68,7 +106,7 @@ public void testTwoEqualsWithSameValue() { } public void testOneEqualsOneIn() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, List.of(TWO))); Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); @@ -79,7 +117,7 @@ public void testOneEqualsOneIn() { } public void testOneEqualsOneInWithSameValue() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or or = new Or(EMPTY, equalsOf(fa, ONE), new In(EMPTY, fa, asList(ONE, TWO))); Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); @@ -90,7 +128,7 @@ public void testOneEqualsOneInWithSameValue() { } public void testSingleValueInToEquals() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Equals equals = equalsOf(fa, ONE); Or or = new Or(EMPTY, equals, new In(EMPTY, fa, List.of(ONE))); @@ -99,7 +137,7 @@ public void testSingleValueInToEquals() { } public void testEqualsBehindAnd() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); And and = new And(EMPTY, equalsOf(fa, ONE), equalsOf(fa, 
TWO)); Filter dummy = new Filter(EMPTY, relation(), and); @@ -109,8 +147,8 @@ public void testEqualsBehindAnd() { } public void testTwoEqualsDifferentFields() { - FieldAttribute fieldOne = getFieldAttribute("ONE"); - FieldAttribute fieldTwo = getFieldAttribute("TWO"); + FieldAttribute fieldOne = TestUtils.getFieldAttribute("ONE"); + FieldAttribute fieldTwo = TestUtils.getFieldAttribute("TWO"); Or or = new Or(EMPTY, equalsOf(fieldOne, ONE), equalsOf(fieldTwo, TWO)); Expression e = new OptimizerRules.CombineDisjunctionsToIn().rule(or); @@ -118,7 +156,7 @@ public void testTwoEqualsDifferentFields() { } public void testMultipleIn() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), new In(EMPTY, fa, List.of(TWO))); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); @@ -130,7 +168,7 @@ public void testMultipleIn() { } public void testOrWithNonCombinableExpressions() { - FieldAttribute fa = getFieldAttribute("a"); + FieldAttribute fa = getFieldAttribute(); Or firstOr = new Or(EMPTY, new In(EMPTY, fa, List.of(ONE)), lessThanOf(fa, TWO)); Or secondOr = new Or(EMPTY, firstOr, new In(EMPTY, fa, List.of(THREE))); @@ -143,4 +181,327 @@ public void testOrWithNonCombinableExpressions() { assertEquals(fa, in.value()); assertThat(in.list(), contains(ONE, THREE)); } + + // a == 1 AND a == 2 -> FALSE + public void testDualEqualsConjunction() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, ONE); + Equals eq2 = equalsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); + assertEquals(FALSE, exp); + } + + // a <=> 1 AND a <=> 2 -> FALSE + public void testDualNullEqualsConjunction() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, ONE); + NullEquals eq2 = nullEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule 
= new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, eq2)); + assertEquals(FALSE, exp); + } + + // 1 < a < 10 AND a == 10 -> FALSE + public void testEliminateRangeByEqualsOutsideInterval() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, new Literal(EMPTY, 10, DataTypes.INTEGER)); + Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(FALSE, exp); + } + + // 1 < a < 10 AND a <=> 10 -> FALSE + public void testEliminateRangeByNullEqualsOutsideInterval() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, new Literal(EMPTY, 10, DataTypes.INTEGER)); + Range r = rangeOf(fa, ONE, false, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(FALSE, exp); + } + + // a != 3 AND a = 3 -> FALSE + public void testPropagateEquals_VarNeq3AndVarEq3() { + FieldAttribute fa = getFieldAttribute(); + NotEquals neq = notEqualsOf(fa, THREE); + Equals eq = equalsOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, neq, eq)); + assertEquals(FALSE, exp); + } + + // a != 4 AND a = 3 -> a = 3 + public void testPropagateEquals_VarNeq4AndVarEq3() { + FieldAttribute fa = getFieldAttribute(); + NotEquals neq = notEqualsOf(fa, FOUR); + Equals eq = equalsOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, neq, eq)); + assertEquals(Equals.class, exp.getClass()); + assertEquals(eq, exp); + } + + // a = 2 AND a < 2 -> FALSE + public void testPropagateEquals_VarEq2AndVarLt2() { + FieldAttribute fa = getFieldAttribute(); + 
Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a <= 2 -> a = 2 + public void testPropagateEquals_VarEq2AndVarLte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThanOrEqual lt = lessThanOrEqualOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(eq, exp); + } + + // a = 2 AND a <= 1 -> FALSE + public void testPropagateEquals_VarEq2AndVarLte1() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThanOrEqual lt = lessThanOrEqualOf(fa, ONE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, lt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a > 2 -> FALSE + public void testPropagateEquals_VarEq2AndVarGt2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a >= 2 -> a = 2 + public void testPropagateEquals_VarEq2AndVarGte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gte)); + assertEquals(eq, exp); + } + + // a = 2 AND a > 3 -> FALSE + public void testPropagateEquals_VarEq2AndVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = 
new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq, gt)); + assertEquals(FALSE, exp); + } + + // a = 2 AND a < 3 AND a > 1 AND a != 4 -> a = 2 + public void testPropagateEquals_VarEq2AndVarLt3AndVarGt1AndVarNeq4() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, THREE); + GreaterThan gt = greaterThanOf(fa, ONE); + NotEquals neq = notEqualsOf(fa, FOUR); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression and = Predicates.combineAnd(asList(eq, lt, gt, neq)); + Expression exp = rule.rule((And) and); + assertEquals(eq, exp); + } + + // a = 2 AND 1 < a < 3 AND a > 0 AND a != 4 -> a = 2 + public void testPropagateEquals_VarEq2AndVarRangeGt1Lt3AndVarGt0AndVarNeq4() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, ONE, false, THREE, false); + GreaterThan gt = greaterThanOf(fa, new Literal(EMPTY, 0, DataTypes.INTEGER)); + NotEquals neq = notEqualsOf(fa, FOUR); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression and = Predicates.combineAnd(asList(eq, range, gt, neq)); + Expression exp = rule.rule((And) and); + assertEquals(eq, exp); + } + + // a = 2 OR a > 1 -> a > 1 + public void testPropagateEquals_VarEq2OrVarGt1() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, ONE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + assertEquals(gt, exp); + } + + // a = 2 OR a > 2 -> a >= 2 + public void testPropagateEquals_VarEq2OrVarGte2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + GreaterThan gt = greaterThanOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, gt)); + 
assertEquals(GreaterThanOrEqual.class, exp.getClass()); + GreaterThanOrEqual gte = (GreaterThanOrEqual) exp; + assertEquals(TWO, gte.right()); + } + + // a = 2 OR a < 3 -> a < 3 + public void testPropagateEquals_VarEq2OrVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + LessThan lt = lessThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + assertEquals(lt, exp); + } + + // a = 3 OR a < 3 -> a <= 3 + public void testPropagateEquals_VarEq3OrVarLt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, THREE); + LessThan lt = lessThanOf(fa, THREE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, lt)); + assertEquals(LessThanOrEqual.class, exp.getClass()); + LessThanOrEqual lte = (LessThanOrEqual) exp; + assertEquals(THREE, lte.right()); + } + + // a = 2 OR 1 < a < 3 -> 1 < a < 3 + public void testPropagateEquals_VarEq2OrVarRangeGt1Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, ONE, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + assertEquals(range, exp); + } + + // a = 2 OR 2 < a < 3 -> 2 <= a < 3 + public void testPropagateEquals_VarEq2OrVarRangeGt2Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, TWO, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + assertEquals(Range.class, exp.getClass()); + Range r = (Range) exp; + assertEquals(TWO, r.lower()); + assertTrue(r.includeLower()); + assertEquals(THREE, r.upper()); + assertFalse(r.includeUpper()); + } + + // a = 3 OR 2 < a < 3 -> 2 < a <= 3 + 
public void testPropagateEquals_VarEq3OrVarRangeGt2Lt3() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, THREE); + Range range = rangeOf(fa, TWO, false, THREE, false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, range)); + assertEquals(Range.class, exp.getClass()); + Range r = (Range) exp; + assertEquals(TWO, r.lower()); + assertFalse(r.includeLower()); + assertEquals(THREE, r.upper()); + assertTrue(r.includeUpper()); + } + + // a = 2 OR a != 2 -> TRUE + public void testPropagateEquals_VarEq2OrVarNeq2() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + assertEquals(TRUE, exp); + } + + // a = 2 OR a != 5 -> a != 5 + public void testPropagateEquals_VarEq2OrVarNeq5() { + FieldAttribute fa = getFieldAttribute(); + Equals eq = equalsOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, FIVE); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new Or(EMPTY, eq, neq)); + assertEquals(NotEquals.class, exp.getClass()); + NotEquals ne = (NotEquals) exp; + assertEquals(FIVE, ne.right()); + } + + // a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE + public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() { + FieldAttribute fa = getFieldAttribute(); + org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.Equals eq = equalsOf(fa, TWO); + Range range = rangeOf(fa, THREE, false, FOUR, false); + GreaterThan gt = greaterThanOf(fa, TWO); + NotEquals neq = notEqualsOf(fa, TWO); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt))); + assertEquals(TRUE, exp); + } + + // a == 1 AND a == 2 -> nop 
for date/time fields + public void testPropagateEquals_ignoreDateTimeFields() { + FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME); + Equals eq1 = equalsOf(fa, ONE); + Equals eq2 = equalsOf(fa, TWO); + And and = new And(EMPTY, eq1, eq2); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(and); + assertEquals(and, exp); + } + + // 1 <= a < 10 AND a == 1 -> a == 1 + public void testEliminateRangeByEqualsInInterval() { + FieldAttribute fa = getFieldAttribute(); + Equals eq1 = equalsOf(fa, ONE); + Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(eq1, exp); + } + + // 1 <= a < 10 AND a <=> 1 -> a <=> 1 + public void testEliminateRangeByNullEqualsInInterval() { + FieldAttribute fa = getFieldAttribute(); + NullEquals eq1 = nullEqualsOf(fa, ONE); + Range r = rangeOf(fa, ONE, true, new Literal(EMPTY, 10, DataTypes.INTEGER), false); + + OptimizerRules.PropagateEquals rule = new OptimizerRules.PropagateEquals(); + Expression exp = rule.rule(new And(EMPTY, eq1, r)); + assertEquals(eq1, exp); + } } From 1259aeea9b2eb1f1b29f43203b74c7a73d2fc4e8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 22 Mar 2024 13:44:10 -0400 Subject: [PATCH 142/214] ESQL: Switch more tests from version checks (#106332) This switches another pile of ESQL's tests from checking the release version to checking for feature markers. 
--- .../src/main/resources/boolean.csv-spec | 16 ++++-- .../src/main/resources/conditional.csv-spec | 20 +++++-- .../src/main/resources/date.csv-spec | 4 +- .../src/main/resources/eval.csv-spec | 4 +- .../src/main/resources/floats.csv-spec | 12 +++- .../src/main/resources/ints.csv-spec | 56 ++++++++++++++----- .../src/main/resources/ip.csv-spec | 16 ++++-- .../src/main/resources/spatial.csv-spec | 16 ++++-- .../src/main/resources/string.csv-spec | 20 +++++-- .../xpack/esql/plugin/EsqlFeatures.java | 30 ++++++++-- 10 files changed, 148 insertions(+), 46 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 1406028b2c81f..2713660cd47d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -235,14 +235,18 @@ emp_no:integer |languages:integer |byte2bool:boolean |short2bool:boolean 10030 |3 |true |true ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = [true, false, true, false] | eval sa = mv_sort(a), sb = mv_sort(a, "DESC"); a:boolean | sa:boolean | sb:boolean [true, false, true, false] | [false, false, true, true] | [true, true, false, false] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(is_rehired, "DESC"), sa = mv_sort(is_rehired) | sort emp_no @@ -258,7 +262,9 @@ emp_no:integer | is_rehired:boolean | sa:boolean | sd:boolea 10005 | [false,false,false,true] | [false,false,false,true] | [true,false,false,false] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); @@ -266,7 +272,9 @@ a:boolean | a1:boolean | a2:boolean [true, false, false, true] | false | [false, true] 
; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(is_rehired, 0) | keep emp_no, is_rehired, a1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64c5a7358ce22..f574722f691e5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -129,7 +129,9 @@ error_rate:double | hour:date ; -nullOnMultivaluesMathOperation#[skip:-8.13.99,reason:fixed in 8.14+] +nullOnMultivaluesMathOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:37: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -139,7 +141,9 @@ a:integer | b:integer | sum:integer ; -notNullOnMultivaluesMathOperation#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesMathOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NOT NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. 
warning:Line 1:37: java.lang.IllegalArgumentException: single-value function encountered multi-value @@ -148,7 +152,9 @@ a:integer | b:integer | sum:integer ; -nullOnMultivaluesComparisonOperation#[skip:-8.13.99,reason:fixed in 8.14+] +nullOnMultivaluesComparisonOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; a:integer | b:integer | same:boolean @@ -156,14 +162,18 @@ a:integer | b:integer | same:boolean ; -notNullOnMultivaluesComparisonOperation#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesComparisonOperation +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; a:integer | b:integer | same:boolean ; -notNullOnMultivaluesComparisonOperationWithPartialMatch#[skip:-8.13.99,reason:fixed in 8.14+] +notNullOnMultivaluesComparisonOperationWithPartialMatch +required_feature: esql.disable_nullable_opts + ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; a:integer | b:integer | same:boolean diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 09128eca5c18e..de7a48bcf6834 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1154,7 +1154,9 @@ FROM sample_data // end::docsNowWhere-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = ["1985-01-01T00:00:00.000Z", "1986-01-01T00:00:00.000Z", "1987-01-01T00:00:00.000Z"] | eval datetime = TO_DATETIME(a) | eval sa = mv_sort(datetime), sd = mv_sort(datetime, "DESC") diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 7d18d2616e376..9b06e9a0a8b23 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -200,7 +200,9 @@ Chirstian. |Chirstian.Koblick|Chirstian.KoblickChirstian.|Chirstian Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; -roundArrays#[skip:-8.13.99, reason:Alert order changed in 8.14] +roundArrays +required_feature: esql.disable_nullable_opts + row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. warning:Line 1:56: java.lang.IllegalArgumentException: single-value function encountered multi-value diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 7e52864d0e379..75011388a9f5a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -240,7 +240,9 @@ row a = [1.1, 2.1, 2.1] | eval da = mv_dedupe(a); [1.1, 2.1, 2.1] | [1.1, 2.1] ; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change, 0, 1) | keep emp_no, salary_change, a1 @@ -455,14 +457,18 @@ ROW deg = [90.0, 180.0, 270.0] // end::to_radians-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = [4.0, 2.0, -3.0, 2.0] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); a:double | sa:double | sd:double [4.0, 2.0, -3.0, 2.0] | [-3.0, 2.0, 2.0, 4.0] | [4.0, 2.0, 2.0, -3.0] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change, "DESC"), sa = mv_sort(salary_change) | sort emp_no diff 
--git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 20f2e579643f2..7a64c9a87e0c9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -416,7 +416,9 @@ row a = [1, 2, 2, 3] | eval da = mv_dedupe(a); [1, 2, 2, 3] | [1, 2, 3] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + // tag::mv_sort[] ROW a = [4, 2, -3, 2] | EVAL sa = mv_sort(a), sd = mv_sort(a, "DESC") @@ -429,7 +431,9 @@ a:integer | sa:integer | sd:integer // end::mv_sort-result[] ; -mvSortEmpInt#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmpInt +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change.int, "DESC"), sa = mv_sort(salary_change.int) | sort emp_no @@ -449,7 +453,9 @@ emp_no:integer | salary_change.int:integer | sa:integer | sd:integer 10009 | null | null | null ; -mvSortEmpLong#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmpLong +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(salary_change.long, "DESC"), sa = mv_sort(salary_change.long) | sort emp_no @@ -469,7 +475,9 @@ emp_no:integer | salary_change.long:long | sa:long | sd:long 10009 | null | null | null ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + // tag::mv_slice_positive[] row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3) @@ -481,7 +489,9 @@ a:integer | a1:integer | a2:integer // end::mv_slice_positive-result[] ; -mvSliceNegativeOffset#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceNegativeOffset +required_feature: esql.mv_sort + // tag::mv_slice_negative[] row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, -2), a2 = mv_slice(a, -3, -1) @@ -493,7 +503,9 @@ a:integer | a1:integer | a2:integer // end::mv_slice_negative-result[] ; -mvSliceSingle#[skip:-8.13.99, 
reason:newly added in 8.14] +mvSliceSingle +required_feature: esql.mv_sort + row a = 1 | eval a1 = mv_slice(a, 0); @@ -501,7 +513,9 @@ a:integer | a1:integer 1 | 1 ; -mvSliceOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceOutOfBound +required_feature: esql.mv_sort + row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 4), a2 = mv_slice(a, 2, 6), a3 = mv_slice(a, 4, 6); @@ -509,7 +523,9 @@ a:integer | a1:integer | a2:integer | a3:integer [1, 2, 2, 3] | null | [2, 3] | null ; -mvSliceEmpInt#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpInt +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 0, 1) | keep emp_no, salary_change.int, a1 @@ -524,7 +540,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | [-2, 13] ; -mvSliceEmpIntSingle#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntSingle +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 1) | keep emp_no, salary_change.int, a1 @@ -539,7 +557,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | 13 ; -mvSliceEmpIntEndOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntEndOutOfBound +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 1, 4) | keep emp_no, salary_change.int, a1 @@ -554,7 +574,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | 13 ; -mvSliceEmpIntOutOfBound#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntOutOfBound +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, 2, 4) | keep emp_no, salary_change.int, a1 @@ -569,7 +591,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | null ; -mvSliceEmpIntStartOutOfBoundNegative#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntStartOutOfBoundNegative +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, -5, -2) | keep 
emp_no, salary_change.int, a1 @@ -584,7 +608,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | -2 ; -mvSliceEmpIntOutOfBoundNegative#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpIntOutOfBoundNegative +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.int, -5, -3) | keep emp_no, salary_change.int, a1 @@ -599,7 +625,9 @@ emp_no:integer | salary_change.int:integer | a1:integer 10005 | [-2, 13] | null ; -mvSliceEmpLong#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmpLong +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.long, 0, 1) | keep emp_no, salary_change.long, a1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index b83dda1376ac5..09b17ed4112c9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -302,7 +302,9 @@ eth0 |127.0.0.3 eth0 |fe80::cae2:65ff:fece:fec1 ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + FROM hosts | eval sd = mv_sort(ip1, "DESC"), sa = mv_sort(ip1) | sort host desc, ip1 @@ -318,7 +320,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe82::cae2:65ff:fece:fec0, fe81::cae2:65ff:fece:feb9] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + from hosts | where host == "epsilon" | eval a1 = mv_slice(ip1, 0, 1) @@ -332,7 +336,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -mvSlice#[skip:-8.13.99, reason:newly added in 8.14] +mvSlice +required_feature: esql.mv_sort + from hosts | where host == 
"epsilon" | eval a1 = mv_slice(ip1, 0, 1) @@ -346,7 +352,9 @@ epsilon | fe80::cae2:65ff:fece:fec1 | fe80::ca epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -mvZip#[skip:-8.13.99, reason:newly added in 8.14] +mvZip +required_feature: esql.mv_sort + from hosts | eval zip = mv_zip(to_string(description), to_string(ip0), "@@") | keep host, description, ip0, zip diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 88155301a06bc..495d0cbb8d7f0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -71,7 +71,9 @@ c:geo_point POINT(39.58327988510707 20.619513023697994) ; -centroidFromString4#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +centroidFromString4 +required_feature: esql.st_x_y + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -82,7 +84,9 @@ c:geo_point | x:double | y:double POINT(39.58327988510707 20.619513023697994) | 39.58327988510707 | 20.619513023697994 ; -stXFromString#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromString +required_feature: esql.st_x_y + // tag::st_x_y[] ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") | EVAL x = ST_X(point), y = ST_Y(point) @@ -113,7 +117,9 @@ WIIT | Bandar Lampung | POINT(105.2667 -5.45) | Indonesia ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran | POINT(60.900708564915 29.4752941956573) | Zahedan Int'l | 9 | mid ; -stXFromAirportsSupportsNull#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromAirportsSupportsNull +required_feature: esql.st_x_y + FROM airports | EVAL x = FLOOR(ABS(ST_X(city_location))/200), y = 
FLOOR(ABS(ST_Y(city_location))/100) | STATS c = count(*) BY x, y @@ -604,7 +610,9 @@ c:cartesian_point POINT(3949.163965353159 1078.2645465797348) ; -stXFromCartesianString#[skip:-8.13.99, reason:st_x and st_y added in 8.14] +stXFromCartesianString +required_feature: esql.st_x_y + ROW point = TO_CARTESIANPOINT("POINT(4297.10986328125 -1475.530029296875)") | EVAL x = ST_X(point), y = ST_Y(point) ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 1bd7860af1018..06fca2682bbb9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -714,14 +714,18 @@ ROW a=[10, 9, 8] // end::mv_concat-to_string-result[] ; -mvSort#[skip:-8.13.99, reason:newly added in 8.14] +mvSort +required_feature: esql.mv_sort + row a = ["Mon", "Tues", "Wed", "Thu", "Fri"] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); a:keyword | sa:keyword | sd:keyword ["Mon", "Tues", "Wed", "Thu", "Fri"] | [Fri, Mon, Thu, Tues, Wed] | [Wed, Tues, Thu, Mon, Fri] ; -mvSortEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSortEmp +required_feature: esql.mv_sort + FROM employees | eval sd = mv_sort(job_positions, "DESC"), sa = mv_sort(job_positions) | sort emp_no @@ -737,7 +741,9 @@ emp_no:integer | job_positions:keyword 10005 | null | null | null ; -mvSliceEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvSliceEmp +required_feature: esql.mv_sort + from employees | eval a1 = mv_slice(salary_change.keyword, 0, 1) | keep emp_no, salary_change.keyword, a1 @@ -752,7 +758,9 @@ emp_no:integer | salary_change.keyword:keyword | a1:keyword 10005 | [-2.14,13.07] | [-2.14,13.07] ; -mvZip#[skip:-8.13.99, reason:newly added in 8.14] +mvZip +required_feature: esql.mv_sort + // tag::mv_zip[] ROW a = ["x", "y", "z"], b = ["1", "2"] | EVAL c = mv_zip(a, b, "-") @@ -766,7 +774,9 @@ a:keyword | b:keyword | c:keyword // 
end::mv_zip-result[] ; -mvZipEmp#[skip:-8.13.99, reason:newly added in 8.14] +mvZipEmp +required_feature: esql.mv_sort + from employees | eval full_name = mv_zip(first_name, last_name, " "), full_name_2 = mv_zip(last_name, first_name), jobs = mv_zip(job_positions, salary_change.keyword, "#") | keep emp_no, full_name, full_name_2, job_positions, salary_change.keyword, jobs diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 3b2c1e9d9a486..17f262143f57a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -15,6 +15,23 @@ import java.util.Set; public class EsqlFeatures implements FeatureSpecification { + /** + * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. + * Added in #106095. + */ + private static final NodeFeature MV_SORT = new NodeFeature("esql.mv_sort"); + + /** + * When we disabled some broken optimizations around {@code nullable}. + * Fixed in #105691. + */ + private static final NodeFeature DISABLE_NULLABLE_OPTS = new NodeFeature("esql.disable_nullable_opts"); + + /** + * Introduction of {@code ST_X} and {@code ST_Y}. Added in #105768. + */ + private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); + /** * When we added the warnings for multivalued fields emitting {@code null} * when they touched multivalued fields. Added in #102417. @@ -48,8 +65,16 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature AGG_VALUES = new NodeFeature("esql.agg_values"); + /** + * Does ESQL support async queries. 
+ */ public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); + @Override + public Set getFeatures() { + return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y); + } + @Override public Map getHistoricalFeatures() { return Map.ofEntries( @@ -61,9 +86,4 @@ public Map getHistoricalFeatures() { // Map.entry(GEO_SHAPE_SUPPORT, Version.V_8_13_0) ); } - - @Override - public Set getFeatures() { - return Set.of(ASYNC_QUERY, AGG_VALUES); - } } From 82d7e4ec937a59620c535b18ae9e403ddfcc7525 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Fri, 22 Mar 2024 14:06:19 -0400 Subject: [PATCH 143/214] [DOCS] Clarify behavior of the generic `data` node role (#106375) --- docs/reference/modules/node.asciidoc | 41 +++++++++++++++------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index ec60b2bca37e4..8a42d11f6367a 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -68,8 +68,8 @@ A node that has the `master` role, which makes it eligible to be <>:: -A node that has the `data` role. Data nodes hold data and perform data -related operations such as CRUD, search, and aggregations. A node with the `data` role can fill any of the specialised data node roles. +A node that has one of several data roles. Data nodes hold data and perform data +related operations such as CRUD, search, and aggregations. A node with a generic `data` role can fill any of the specialized data node roles. <>:: @@ -220,7 +220,7 @@ therefore ensure that the storage and networking available to the nodes in your cluster are good enough to meet your performance goals. [[data-node]] -==== Data node +==== Data nodes Data nodes hold the shards that contain the documents you have indexed. Data nodes handle data related operations like CRUD, search, and aggregations. 
@@ -230,20 +230,27 @@ monitor these resources and to add more data nodes if they are overloaded. The main benefit of having dedicated data nodes is the separation of the master and data roles. -To create a dedicated data node, set: +In a multi-tier deployment architecture, you use specialized data roles to +assign data nodes to specific tiers: `data_content`,`data_hot`, `data_warm`, +`data_cold`, or `data_frozen`. A node can belong to multiple tiers. + +If you want to include a node in all tiers, or if your cluster does not use multiple tiers, then you can use the generic `data` role. + +WARNING: If you assign a node to a specific tier using a specialized data role, then you shouldn't also assign it the generic `data` role. The generic `data` role takes precedence over specialized data roles. + +[[generic-data-node]] +===== Generic data node + +Generic data nodes are included in all content tiers. + +To create a dedicated generic data node, set: [source,yaml] ---- node.roles: [ data ] ---- -In a multi-tier deployment architecture, you use specialized data roles to -assign data nodes to specific tiers: `data_content`,`data_hot`, `data_warm`, -`data_cold`, or `data_frozen`. A node can belong to multiple tiers, but a node -that has one of the specialized data roles cannot have the generic `data` role. - -[role="xpack"] [[data-content-node]] -==== Content data node +===== Content data node Content data nodes are part of the content tier. include::{es-repo-dir}/datatiers.asciidoc[tag=content-tier] @@ -254,9 +261,8 @@ To create a dedicated content node, set: node.roles: [ data_content ] ---- -[role="xpack"] [[data-hot-node]] -==== Hot data node +===== Hot data node Hot data nodes are part of the hot tier. include::{es-repo-dir}/datatiers.asciidoc[tag=hot-tier] @@ -267,9 +273,8 @@ To create a dedicated hot node, set: node.roles: [ data_hot ] ---- -[role="xpack"] [[data-warm-node]] -==== Warm data node +===== Warm data node Warm data nodes are part of the warm tier. 
include::{es-repo-dir}/datatiers.asciidoc[tag=warm-tier] @@ -280,9 +285,8 @@ To create a dedicated warm node, set: node.roles: [ data_warm ] ---- -[role="xpack"] [[data-cold-node]] -==== Cold data node +===== Cold data node Cold data nodes are part of the cold tier. include::{es-repo-dir}/datatiers.asciidoc[tag=cold-tier] @@ -293,9 +297,8 @@ To create a dedicated cold node, set: node.roles: [ data_cold ] ---- -[role="xpack"] [[data-frozen-node]] -==== Frozen data node +===== Frozen data node Frozen data nodes are part of the frozen tier. include::{es-repo-dir}/datatiers.asciidoc[tag=frozen-tier] From bb9566a57e0e65b601036fbceab5fac7c056e794 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Fri, 22 Mar 2024 14:43:48 -0500 Subject: [PATCH 144/214] Update discovery.asciidoc (#106541) (#106695) Fix typo (cherry picked from commit 96a46b9c5b41145626477dc2fa062456d3e46a75) Co-authored-by: Boen <13752080613@163.com> --- docs/reference/modules/discovery/discovery.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/modules/discovery/discovery.asciidoc b/docs/reference/modules/discovery/discovery.asciidoc index a665a401ebab2..2311fa738fc1a 100644 --- a/docs/reference/modules/discovery/discovery.asciidoc +++ b/docs/reference/modules/discovery/discovery.asciidoc @@ -115,7 +115,7 @@ supplied in `unicast_hosts.txt`. The `unicast_hosts.txt` file contains one node entry per line. Each node entry consists of the host (host name or IP address) and an optional transport port -number. If the port number is specified, is must come immediately after the +number. If the port number is specified, it must come immediately after the host (on the same line) separated by a `:`. 
If the port number is not specified, {es} will implicitly use the first port in the port range given by `transport.profiles.default.port`, or by `transport.port` if From 35fcc9a29d88bad59adf6a50e83754629df73cb5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 22 Mar 2024 16:30:59 -0400 Subject: [PATCH 145/214] ESQL: Add README.md to docs (#106698) This explains how to run the tests that build the docs. I tried to add it in #106577 but the sync code deleted it. So I fixed that too. --- docs/reference/esql/functions/README.md | 21 +++++++++++++++++++ x-pack/plugin/esql/build.gradle | 2 +- .../function/AbstractFunctionTestCase.java | 3 ++- 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 docs/reference/esql/functions/README.md diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md new file mode 100644 index 0000000000000..fd310ebacfe7e --- /dev/null +++ b/docs/reference/esql/functions/README.md @@ -0,0 +1,21 @@ +The files in these subdirectories and generated by ESQL's test suite: +* `description` - description of each function scraped from `@FunctionInfo#description` +* `examples` - examples of each function scraped from `@FunctionInfo#examples` +* `parameters` - description of each function's parameters scraped from `@Param` +* `signature` - railroad diagram of the syntax to invoke each function +* `types` - a table of each combination of support type for each parameter. These are generated from tests. +* `layout` - a fully generated description for each function + +Most functions can use the generated docs generated in the `layout` directory. +If we need something more custom for the function we can make a file in this +directory that can `include::` any parts of the files above. 
+ +To regenerate the files for a function run its tests using gradle: +``` +./gradlew :x-pack:plugin:esql:tests -Dtests.class='*SinTests' +``` + +To regenerate the files for all functions run all of ESQL's tests using gradle: +``` +./gradlew :x-pack:plugin:esql:tests +``` diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 8f8d2774a5020..3fdfa7835b036 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -88,7 +88,7 @@ tasks.named("test").configure { into "${rootDir}/docs/reference/esql/functions" include '**/*.asciidoc', '**/*.svg' preserve { - include '/*.asciidoc' + include '/*.asciidoc', 'README.md' } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 61d62d6f54344..755e5fcf25b9b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -10,6 +10,7 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.bytes.BytesReference; @@ -1161,7 +1162,7 @@ private static void renderDescription(String description, String note) throws IO *Description* """ + description + "\n"; - if (note != null) { + if (Strings.isNullOrEmpty(note) == false) { rendered += "\nNOTE: " + note + "\n"; } LogManager.getLogger(getTestClass()).info("Writing description for [{}]:\n{}", functionName(), rendered); From e9fcb0a0f0f45773248c5258982d6bf2d5517299 Mon Sep 17 
00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Sun, 24 Mar 2024 13:19:49 +0200 Subject: [PATCH 146/214] Retrieve routing hash from id when missing (#106682) * Retrieve routing hash from id when missing * add test * add test 2 --- .../action/index/IndexRequest.java | 2 +- .../TimeSeriesRoutingHashFieldMapper.java | 9 ++++- .../mapper/TimeSeriesIdFieldMapperTests.java | 34 ------------------- ...TimeSeriesRoutingHashFieldMapperTests.java | 20 +++++++++++ 4 files changed, 29 insertions(+), 36 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index a8d6220415a43..d142db2d5a1ab 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -329,7 +329,7 @@ public IndexRequest id(String id) { */ @Override public IndexRequest routing(String routing) { - if (routing != null && routing.length() == 0) { + if (routing != null && routing.isEmpty()) { this.routing = null; } else { this.routing = routing; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java index d5750600a25c9..b9629d7561982 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapper.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import java.time.ZoneId; +import java.util.Arrays; import java.util.Base64; import java.util.Collections; @@ -111,7 +112,13 @@ public void postParse(DocumentParserContext context) { if (context.indexSettings().getMode() == IndexMode.TIME_SERIES && 
context.indexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.TIME_SERIES_ROUTING_HASH_IN_ID)) { String routingHash = context.sourceToParse().routing(); - var field = new SortedDocValuesField(NAME, Uid.encodeId(routingHash != null ? routingHash : encode(0))); + if (routingHash == null) { + assert context.sourceToParse().id() != null; + routingHash = Base64.getUrlEncoder() + .withoutPadding() + .encodeToString(Arrays.copyOf(Base64.getUrlDecoder().decode(context.sourceToParse().id()), 4)); + } + var field = new SortedDocValuesField(NAME, Uid.encodeId(routingHash)); context.rootDoc().add(field); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java index 50abb47e51125..87b107d5bd139 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java @@ -720,38 +720,4 @@ public void testParseWithDynamicMappingInvalidRoutingHash() { }); assertThat(failure.getMessage(), equalTo("[5:1] failed to parse: Illegal base64 character 20")); } - - public void testParseWithDynamicMappingNullId() { - Settings indexSettings = Settings.builder() - .put(IndexSettings.MODE.getKey(), "time_series") - .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim") - .build(); - MapperService mapper = createMapperService(IndexVersion.current(), indexSettings, () -> false); - SourceToParse source = new SourceToParse(null, new BytesArray(""" - { - "@timestamp": 1609459200000, - "dim": "6a841a21", - "value": 100 - }"""), XContentType.JSON); - var failure = expectThrows(DocumentParsingException.class, () -> { - IndexShard.prepareIndex( - mapper, - source, - UNASSIGNED_SEQ_NO, - randomNonNegativeLong(), - Versions.MATCH_ANY, - VersionType.INTERNAL, - Engine.Operation.Origin.PRIMARY, - -1, - false, - UNASSIGNED_SEQ_NO, - 0, - 
System.nanoTime() - ); - }); - assertThat( - failure.getMessage(), - equalTo("[5:1] failed to parse: _ts_routing_hash was null but must be set because index [index] is in time_series mode") - ); - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java index df5ff9a8fe7e5..5352bd446a80b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesRoutingHashFieldMapperTests.java @@ -59,6 +59,15 @@ private static ParsedDocument parseDocument(int hash, DocumentMapper docMapper, }, TimeSeriesRoutingHashFieldMapper.encode(hash))); } + private static ParsedDocument parseDocument(String id, DocumentMapper docMapper, CheckedConsumer f) + throws IOException { + // Add the @timestamp field required by DataStreamTimestampFieldMapper for all time series indices + return docMapper.parse(source(id, b -> { + f.accept(b); + b.field("@timestamp", "2021-10-01"); + }, null)); + } + private static int getRoutingHash(ParsedDocument document) { BytesRef value = document.rootDoc().getBinaryValue(TimeSeriesRoutingHashFieldMapper.NAME); return TimeSeriesRoutingHashFieldMapper.decode(Uid.decodeId(value.bytes)); @@ -76,6 +85,17 @@ public void testEnabledInTimeSeriesMode() throws Exception { assertEquals(hash, getRoutingHash(doc)); } + public void testRetrievedFromIdInTimeSeriesMode() throws Exception { + DocumentMapper docMapper = createMapper(mapping(b -> { + b.startObject("a").field("type", "keyword").field("time_series_dimension", true).endObject(); + })); + + int hash = randomInt(); + ParsedDocument doc = parseDocument(TimeSeriesRoutingHashFieldMapper.DUMMY_ENCODED_VALUE, docMapper, b -> b.field("a", "value")); + assertThat(doc.rootDoc().getField("a").binaryValue(), equalTo(new BytesRef("value"))); + assertEquals(0, 
getRoutingHash(doc)); + } + public void testDisabledInStandardMode() throws Exception { DocumentMapper docMapper = createMapperService( getIndexSettingsBuilder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.name()).build(), From 12e04e12dad49c9a17bb66ac2bfcf1868f15dfcd Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 25 Mar 2024 19:13:35 +1100 Subject: [PATCH 147/214] Explicitly set number_of_shards to 1 in tests (#106707) Some tests rely on the default number_of_shards to be 1. This may not hold if the default number_of_shards changes. This PR removes that assumption in the tests by explicitly configuring the number_of_shards to 1 at index creation time. Relates: #100171 Relates: ES-7911 --- .../resources/rest-api-spec/test/aggregations/time_series.yml | 1 + .../resources/rest-api-spec/test/count/30_min_score.yml | 3 +++ .../test/search.vectors/160_knn_query_missing_params.yml | 2 ++ 3 files changed, 6 insertions(+) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml index 9e8ec6b3f6768..421c0c5800949 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/time_series.yml @@ -9,6 +9,7 @@ setup: index: tsdb body: settings: + number_of_shards: 1 mode: time_series routing_path: [key] time_series: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml index 278a7095add5e..8bbfb5cff7ed3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml @@ -3,6 +3,9 @@ - do: indices.create: index: test_count_min_score + body: + 
settings: + number_of_shards: 1 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 5194c95151eda..9ff6319a01af4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -6,6 +6,8 @@ setup: indices.create: index: knn_query_test_index body: + settings: + number_of_shards: 1 mappings: properties: vector: From 1b2ed4c7a2064178788bfb541b878f56cb34b085 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Mon, 25 Mar 2024 10:36:18 +0100 Subject: [PATCH 148/214] ESQL: Fix count pushdown for unmapped fields (#106690) Assume that unmapped fields are multi-valued per default. --- .../qa/testFixtures/src/main/resources/stats.csv-spec | 9 +++++++++ .../org/elasticsearch/xpack/esql/stats/SearchStats.java | 3 ++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 4aff4c689c077..917735040c61d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1217,3 +1217,12 @@ FROM airports c:l 891 ; + +countMV#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS vals = COUNT(salary_change.int) +; + +vals:l +183 +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index c813308ea0443..679781a40c869 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -195,7 +195,8 @@ public boolean isSingleValue(String field) { if (exists(field) == false) { stat.singleValue = true; } else { - var sv = new boolean[] { true }; + // fields are MV per default + var sv = new boolean[] { false }; for (SearchContext context : contexts) { var sec = context.getSearchExecutionContext(); MappedFieldType mappedType = sec.isFieldMapped(field) ? null : sec.getFieldType(field); From 9ba2e5f7c53c78ee7044b8bc722d4d2110edc889 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 25 Mar 2024 12:06:47 +0100 Subject: [PATCH 149/214] Remove duplicate ForUtil (#105886) We copied this into our codebase twice. The version with customizable block size is unnecessary, we always use the same block size anyway. => no need to duplicate 1k lines. --- .../index/codec/{tsdb => }/ForUtil.java | 43 +- .../codec/postings/ES812PostingsFormat.java | 1 + .../codec/postings/ES812PostingsReader.java | 3 +- .../codec/postings/ES812PostingsWriter.java | 1 + .../index/codec/postings/ES812SkipReader.java | 1 + .../index/codec/postings/ForUtil.java | 1049 ----------------- .../index/codec/postings/PForUtil.java | 1 + .../index/codec/tsdb/DocValuesForUtil.java | 1 + .../codec/{postings => }/ForUtilTests.java | 2 +- .../codec/tsdb/DocValuesForUtilTests.java | 1 + 10 files changed, 25 insertions(+), 1078 deletions(-) rename server/src/main/java/org/elasticsearch/index/codec/{tsdb => }/ForUtil.java (97%) delete mode 100644 server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java rename server/src/test/java/org/elasticsearch/index/codec/{postings => }/ForUtilTests.java (98%) diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/ForUtil.java similarity index 97% rename from server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java rename to 
server/src/main/java/org/elasticsearch/index/codec/ForUtil.java index 874b90a08b920..5687b0d1b687d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/ForUtil.java @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -package org.elasticsearch.index.codec.tsdb; +package org.elasticsearch.index.codec; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; @@ -20,20 +20,9 @@ // else we pack 2 ints per long public final class ForUtil { - static final int DEFAULT_BLOCK_SIZE = 128; - private final int blockSize; - private final int blockSizeLog2; - private final long[] tmp; - - public ForUtil() { - this(DEFAULT_BLOCK_SIZE); - } - - private ForUtil(int blockSize) { - this.blockSize = blockSize; - this.blockSizeLog2 = (int) (Math.log(blockSize) / Math.log(2)); - this.tmp = new long[blockSize / 2]; - } + public static final int BLOCK_SIZE = 128; + private static final int BLOCK_SIZE_LOG2 = 7; + private final long[] tmp = new long[BLOCK_SIZE / 2]; private static long expandMask32(long mask32) { return mask32 | (mask32 << 32); @@ -129,20 +118,20 @@ private static void collapse32(long[] arr) { } /** Encode 128 integers from {@code longs} into {@code out}. 
*/ - void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { + public void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { final int nextPrimitive; final int numLongs; if (bitsPerValue <= 8) { nextPrimitive = 8; - numLongs = blockSize / 8; + numLongs = BLOCK_SIZE / 8; collapse8(longs); } else if (bitsPerValue <= 16) { nextPrimitive = 16; - numLongs = blockSize / 4; + numLongs = BLOCK_SIZE / 4; collapse16(longs); } else { nextPrimitive = 32; - numLongs = blockSize / 2; + numLongs = BLOCK_SIZE / 2; collapse32(longs); } @@ -202,11 +191,11 @@ void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { } /** Number of bytes required to encode 128 integers of {@code bitsPerValue} bits per value. */ - int numBytes(int bitsPerValue) { - return bitsPerValue << (blockSizeLog2 - 3); + public int numBytes(int bitsPerValue) { + return bitsPerValue << (BLOCK_SIZE_LOG2 - 3); } - private static void decodeSlow(int blockSize, int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { + private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { final int numLongs = bitsPerValue << 1; in.readLongs(tmp, 0, numLongs); final long mask = MASKS32[bitsPerValue]; @@ -220,7 +209,7 @@ private static void decodeSlow(int blockSize, int bitsPerValue, DataInput in, lo final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong]; int tmpIdx = 0; int remainingBits = remainingBitsPerLong; - for (; longsIdx < blockSize / 2; ++longsIdx) { + for (; longsIdx < BLOCK_SIZE / 2; ++longsIdx) { int b = bitsPerValue - remainingBits; long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b; while (b >= remainingBitsPerLong) { @@ -310,7 +299,7 @@ private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, private static final long MASK32_24 = MASKS32[24]; /** Decode 128 integers into {@code longs}. 
*/ - void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { + public void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { switch (bitsPerValue) { case 1: decode1(in, tmp, longs); @@ -409,7 +398,7 @@ void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { expand32(longs); break; default: - decodeSlow(blockSize, bitsPerValue, in, tmp, longs); + decodeSlow(bitsPerValue, in, tmp, longs); expand32(longs); break; } @@ -421,7 +410,7 @@ void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { * [0..63], and values [64..127] are encoded in the low-order bits of {@code longs} [0..63]. This * representation may allow subsequent operations to be performed on two values at a time. */ - void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { + public void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { switch (bitsPerValue) { case 1: decode1(in, tmp, longs); @@ -512,7 +501,7 @@ void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException decode24(in, tmp, longs); break; default: - decodeSlow(blockSize, bitsPerValue, in, tmp, longs); + decodeSlow(bitsPerValue, in, tmp, longs); break; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java index 5270326876e08..6ccfaba7853f2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java @@ -36,6 +36,7 @@ import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java 
b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java index 8b3d5d02a04c0..11bd90cd31610 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java @@ -36,11 +36,13 @@ import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; import java.io.IOException; import java.util.Arrays; +import static org.elasticsearch.index.codec.ForUtil.BLOCK_SIZE; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.DOC_CODEC; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.MAX_SKIP_LEVELS; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.PAY_CODEC; @@ -48,7 +50,6 @@ import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.TERMS_CODEC; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_CURRENT; import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_START; -import static org.elasticsearch.index.codec.postings.ForUtil.BLOCK_SIZE; /** * Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format. 
diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java index 9ab7ed42efb09..cc95b4ffcfacf 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java @@ -35,6 +35,7 @@ import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.ForUtil; import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java index 11c0c611312fc..f9b36114361ca 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java @@ -21,6 +21,7 @@ import org.apache.lucene.codecs.MultiLevelSkipListReader; import org.apache.lucene.store.IndexInput; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java deleted file mode 100644 index d874caab1b8c0..0000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java +++ /dev/null @@ -1,1049 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2022 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.postings; - -import org.apache.lucene.store.DataInput; -import org.apache.lucene.store.DataOutput; - -import java.io.IOException; - -// Inspired from https://fulmicoton.com/posts/bitpacking/ -// Encodes multiple integers in a long to get SIMD-like speedups. -// If bitsPerValue <= 8 then we pack 8 ints per long -// else if bitsPerValue <= 16 we pack 4 ints per long -// else we pack 2 ints per long -final class ForUtil { - - static final int BLOCK_SIZE = 128; - private static final int BLOCK_SIZE_LOG2 = 7; - - private static long expandMask32(long mask32) { - return mask32 | (mask32 << 32); - } - - private static long expandMask16(long mask16) { - return expandMask32(mask16 | (mask16 << 16)); - } - - private static long expandMask8(long mask8) { - return expandMask16(mask8 | (mask8 << 8)); - } - - private static long mask32(int bitsPerValue) { - return expandMask32((1L << bitsPerValue) - 1); - } - - private static long mask16(int bitsPerValue) { - return expandMask16((1L << bitsPerValue) - 1); - } - - private static long mask8(int bitsPerValue) { - return expandMask8((1L << bitsPerValue) - 1); - } - - private static void expand8(long[] arr) { - for (int i = 0; i < 16; ++i) { - long l = arr[i]; - arr[i] = (l >>> 56) & 0xFFL; - arr[16 + i] = (l >>> 48) & 0xFFL; - arr[32 + i] = (l >>> 40) & 
0xFFL; - arr[48 + i] = (l >>> 32) & 0xFFL; - arr[64 + i] = (l >>> 24) & 0xFFL; - arr[80 + i] = (l >>> 16) & 0xFFL; - arr[96 + i] = (l >>> 8) & 0xFFL; - arr[112 + i] = l & 0xFFL; - } - } - - private static void expand8To32(long[] arr) { - for (int i = 0; i < 16; ++i) { - long l = arr[i]; - arr[i] = (l >>> 24) & 0x000000FF000000FFL; - arr[16 + i] = (l >>> 16) & 0x000000FF000000FFL; - arr[32 + i] = (l >>> 8) & 0x000000FF000000FFL; - arr[48 + i] = l & 0x000000FF000000FFL; - } - } - - private static void collapse8(long[] arr) { - for (int i = 0; i < 16; ++i) { - arr[i] = (arr[i] << 56) | (arr[16 + i] << 48) | (arr[32 + i] << 40) | (arr[48 + i] << 32) | (arr[64 + i] << 24) | (arr[80 + i] - << 16) | (arr[96 + i] << 8) | arr[112 + i]; - } - } - - private static void expand16(long[] arr) { - for (int i = 0; i < 32; ++i) { - long l = arr[i]; - arr[i] = (l >>> 48) & 0xFFFFL; - arr[32 + i] = (l >>> 32) & 0xFFFFL; - arr[64 + i] = (l >>> 16) & 0xFFFFL; - arr[96 + i] = l & 0xFFFFL; - } - } - - private static void expand16To32(long[] arr) { - for (int i = 0; i < 32; ++i) { - long l = arr[i]; - arr[i] = (l >>> 16) & 0x0000FFFF0000FFFFL; - arr[32 + i] = l & 0x0000FFFF0000FFFFL; - } - } - - private static void collapse16(long[] arr) { - for (int i = 0; i < 32; ++i) { - arr[i] = (arr[i] << 48) | (arr[32 + i] << 32) | (arr[64 + i] << 16) | arr[96 + i]; - } - } - - private static void expand32(long[] arr) { - for (int i = 0; i < 64; ++i) { - long l = arr[i]; - arr[i] = l >>> 32; - arr[64 + i] = l & 0xFFFFFFFFL; - } - } - - private static void collapse32(long[] arr) { - for (int i = 0; i < 64; ++i) { - arr[i] = (arr[i] << 32) | arr[64 + i]; - } - } - - private final long[] tmp = new long[BLOCK_SIZE / 2]; - - /** Encode 128 integers from {@code longs} into {@code out}. 
*/ - void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { - final int nextPrimitive; - final int numLongs; - if (bitsPerValue <= 8) { - nextPrimitive = 8; - numLongs = BLOCK_SIZE / 8; - collapse8(longs); - } else if (bitsPerValue <= 16) { - nextPrimitive = 16; - numLongs = BLOCK_SIZE / 4; - collapse16(longs); - } else { - nextPrimitive = 32; - numLongs = BLOCK_SIZE / 2; - collapse32(longs); - } - - final int numLongsPerShift = bitsPerValue * 2; - int idx = 0; - int shift = nextPrimitive - bitsPerValue; - for (int i = 0; i < numLongsPerShift; ++i) { - tmp[i] = longs[idx++] << shift; - } - for (shift = shift - bitsPerValue; shift >= 0; shift -= bitsPerValue) { - for (int i = 0; i < numLongsPerShift; ++i) { - tmp[i] |= longs[idx++] << shift; - } - } - - final int remainingBitsPerLong = shift + bitsPerValue; - final long maskRemainingBitsPerLong; - if (nextPrimitive == 8) { - maskRemainingBitsPerLong = MASKS8[remainingBitsPerLong]; - } else if (nextPrimitive == 16) { - maskRemainingBitsPerLong = MASKS16[remainingBitsPerLong]; - } else { - maskRemainingBitsPerLong = MASKS32[remainingBitsPerLong]; - } - - int tmpIdx = 0; - int remainingBitsPerValue = bitsPerValue; - while (idx < numLongs) { - if (remainingBitsPerValue >= remainingBitsPerLong) { - remainingBitsPerValue -= remainingBitsPerLong; - tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & maskRemainingBitsPerLong; - if (remainingBitsPerValue == 0) { - idx++; - remainingBitsPerValue = bitsPerValue; - } - } else { - final long mask1, mask2; - if (nextPrimitive == 8) { - mask1 = MASKS8[remainingBitsPerValue]; - mask2 = MASKS8[remainingBitsPerLong - remainingBitsPerValue]; - } else if (nextPrimitive == 16) { - mask1 = MASKS16[remainingBitsPerValue]; - mask2 = MASKS16[remainingBitsPerLong - remainingBitsPerValue]; - } else { - mask1 = MASKS32[remainingBitsPerValue]; - mask2 = MASKS32[remainingBitsPerLong - remainingBitsPerValue]; - } - tmp[tmpIdx] |= (longs[idx++] & mask1) << 
(remainingBitsPerLong - remainingBitsPerValue); - remainingBitsPerValue = bitsPerValue - remainingBitsPerLong + remainingBitsPerValue; - tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & mask2; - } - } - - for (int i = 0; i < numLongsPerShift; ++i) { - out.writeLong(tmp[i]); - } - } - - /** Number of bytes required to encode 128 integers of {@code bitsPerValue} bits per value. */ - int numBytes(int bitsPerValue) { - return bitsPerValue << (BLOCK_SIZE_LOG2 - 3); - } - - private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { - final int numLongs = bitsPerValue << 1; - in.readLongs(tmp, 0, numLongs); - final long mask = MASKS32[bitsPerValue]; - int longsIdx = 0; - int shift = 32 - bitsPerValue; - for (; shift >= 0; shift -= bitsPerValue) { - shiftLongs(tmp, numLongs, longs, longsIdx, shift, mask); - longsIdx += numLongs; - } - final int remainingBitsPerLong = shift + bitsPerValue; - final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong]; - int tmpIdx = 0; - int remainingBits = remainingBitsPerLong; - for (; longsIdx < BLOCK_SIZE / 2; ++longsIdx) { - int b = bitsPerValue - remainingBits; - long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b; - while (b >= remainingBitsPerLong) { - b -= remainingBitsPerLong; - l |= (tmp[tmpIdx++] & mask32RemainingBitsPerLong) << b; - } - if (b > 0) { - l |= (tmp[tmpIdx] >>> (remainingBitsPerLong - b)) & MASKS32[b]; - remainingBits = remainingBitsPerLong - b; - } else { - remainingBits = remainingBitsPerLong; - } - longs[longsIdx] = l; - } - } - - /** - * The pattern that this shiftLongs method applies is recognized by the C2 compiler, which - * generates SIMD instructions for it in order to shift multiple longs at once. 
- */ - private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, long mask) { - for (int i = 0; i < count; ++i) { - b[bi + i] = (a[i] >>> shift) & mask; - } - } - - private static final long[] MASKS8 = new long[8]; - private static final long[] MASKS16 = new long[16]; - private static final long[] MASKS32 = new long[32]; - - static { - for (int i = 0; i < 8; ++i) { - MASKS8[i] = mask8(i); - } - for (int i = 0; i < 16; ++i) { - MASKS16[i] = mask16(i); - } - for (int i = 0; i < 32; ++i) { - MASKS32[i] = mask32(i); - } - } - - // mark values in array as final longs to avoid the cost of reading array, arrays should only be - // used when the idx is a variable - private static final long MASK8_1 = MASKS8[1]; - private static final long MASK8_2 = MASKS8[2]; - private static final long MASK8_3 = MASKS8[3]; - private static final long MASK8_4 = MASKS8[4]; - private static final long MASK8_5 = MASKS8[5]; - private static final long MASK8_6 = MASKS8[6]; - private static final long MASK8_7 = MASKS8[7]; - private static final long MASK16_1 = MASKS16[1]; - private static final long MASK16_2 = MASKS16[2]; - private static final long MASK16_3 = MASKS16[3]; - private static final long MASK16_4 = MASKS16[4]; - private static final long MASK16_5 = MASKS16[5]; - private static final long MASK16_6 = MASKS16[6]; - private static final long MASK16_7 = MASKS16[7]; - private static final long MASK16_9 = MASKS16[9]; - private static final long MASK16_10 = MASKS16[10]; - private static final long MASK16_11 = MASKS16[11]; - private static final long MASK16_12 = MASKS16[12]; - private static final long MASK16_13 = MASKS16[13]; - private static final long MASK16_14 = MASKS16[14]; - private static final long MASK16_15 = MASKS16[15]; - private static final long MASK32_1 = MASKS32[1]; - private static final long MASK32_2 = MASKS32[2]; - private static final long MASK32_3 = MASKS32[3]; - private static final long MASK32_4 = MASKS32[4]; - private static final long MASK32_5 = 
MASKS32[5]; - private static final long MASK32_6 = MASKS32[6]; - private static final long MASK32_7 = MASKS32[7]; - private static final long MASK32_8 = MASKS32[8]; - private static final long MASK32_9 = MASKS32[9]; - private static final long MASK32_10 = MASKS32[10]; - private static final long MASK32_11 = MASKS32[11]; - private static final long MASK32_12 = MASKS32[12]; - private static final long MASK32_13 = MASKS32[13]; - private static final long MASK32_14 = MASKS32[14]; - private static final long MASK32_15 = MASKS32[15]; - private static final long MASK32_17 = MASKS32[17]; - private static final long MASK32_18 = MASKS32[18]; - private static final long MASK32_19 = MASKS32[19]; - private static final long MASK32_20 = MASKS32[20]; - private static final long MASK32_21 = MASKS32[21]; - private static final long MASK32_22 = MASKS32[22]; - private static final long MASK32_23 = MASKS32[23]; - private static final long MASK32_24 = MASKS32[24]; - - /** Decode 128 integers into {@code longs}. 
*/ - void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { - switch (bitsPerValue) { - case 1: - decode1(in, tmp, longs); - expand8(longs); - break; - case 2: - decode2(in, tmp, longs); - expand8(longs); - break; - case 3: - decode3(in, tmp, longs); - expand8(longs); - break; - case 4: - decode4(in, tmp, longs); - expand8(longs); - break; - case 5: - decode5(in, tmp, longs); - expand8(longs); - break; - case 6: - decode6(in, tmp, longs); - expand8(longs); - break; - case 7: - decode7(in, tmp, longs); - expand8(longs); - break; - case 8: - decode8(in, tmp, longs); - expand8(longs); - break; - case 9: - decode9(in, tmp, longs); - expand16(longs); - break; - case 10: - decode10(in, tmp, longs); - expand16(longs); - break; - case 11: - decode11(in, tmp, longs); - expand16(longs); - break; - case 12: - decode12(in, tmp, longs); - expand16(longs); - break; - case 13: - decode13(in, tmp, longs); - expand16(longs); - break; - case 14: - decode14(in, tmp, longs); - expand16(longs); - break; - case 15: - decode15(in, tmp, longs); - expand16(longs); - break; - case 16: - decode16(in, tmp, longs); - expand16(longs); - break; - case 17: - decode17(in, tmp, longs); - expand32(longs); - break; - case 18: - decode18(in, tmp, longs); - expand32(longs); - break; - case 19: - decode19(in, tmp, longs); - expand32(longs); - break; - case 20: - decode20(in, tmp, longs); - expand32(longs); - break; - case 21: - decode21(in, tmp, longs); - expand32(longs); - break; - case 22: - decode22(in, tmp, longs); - expand32(longs); - break; - case 23: - decode23(in, tmp, longs); - expand32(longs); - break; - case 24: - decode24(in, tmp, longs); - expand32(longs); - break; - default: - decodeSlow(bitsPerValue, in, tmp, longs); - expand32(longs); - break; - } - } - - /** - * Decodes 128 integers into 64 {@code longs} such that each long contains two values, each - * represented with 32 bits. 
Values [0..63] are encoded in the high-order bits of {@code longs} - * [0..63], and values [64..127] are encoded in the low-order bits of {@code longs} [0..63]. This - * representation may allow subsequent operations to be performed on two values at a time. - */ - void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { - switch (bitsPerValue) { - case 1: - decode1(in, tmp, longs); - expand8To32(longs); - break; - case 2: - decode2(in, tmp, longs); - expand8To32(longs); - break; - case 3: - decode3(in, tmp, longs); - expand8To32(longs); - break; - case 4: - decode4(in, tmp, longs); - expand8To32(longs); - break; - case 5: - decode5(in, tmp, longs); - expand8To32(longs); - break; - case 6: - decode6(in, tmp, longs); - expand8To32(longs); - break; - case 7: - decode7(in, tmp, longs); - expand8To32(longs); - break; - case 8: - decode8(in, tmp, longs); - expand8To32(longs); - break; - case 9: - decode9(in, tmp, longs); - expand16To32(longs); - break; - case 10: - decode10(in, tmp, longs); - expand16To32(longs); - break; - case 11: - decode11(in, tmp, longs); - expand16To32(longs); - break; - case 12: - decode12(in, tmp, longs); - expand16To32(longs); - break; - case 13: - decode13(in, tmp, longs); - expand16To32(longs); - break; - case 14: - decode14(in, tmp, longs); - expand16To32(longs); - break; - case 15: - decode15(in, tmp, longs); - expand16To32(longs); - break; - case 16: - decode16(in, tmp, longs); - expand16To32(longs); - break; - case 17: - decode17(in, tmp, longs); - break; - case 18: - decode18(in, tmp, longs); - break; - case 19: - decode19(in, tmp, longs); - break; - case 20: - decode20(in, tmp, longs); - break; - case 21: - decode21(in, tmp, longs); - break; - case 22: - decode22(in, tmp, longs); - break; - case 23: - decode23(in, tmp, longs); - break; - case 24: - decode24(in, tmp, longs); - break; - default: - decodeSlow(bitsPerValue, in, tmp, longs); - break; - } - } - - private static void decode1(DataInput in, long[] tmp, 
long[] longs) throws IOException { - in.readLongs(tmp, 0, 2); - shiftLongs(tmp, 2, longs, 0, 7, MASK8_1); - shiftLongs(tmp, 2, longs, 2, 6, MASK8_1); - shiftLongs(tmp, 2, longs, 4, 5, MASK8_1); - shiftLongs(tmp, 2, longs, 6, 4, MASK8_1); - shiftLongs(tmp, 2, longs, 8, 3, MASK8_1); - shiftLongs(tmp, 2, longs, 10, 2, MASK8_1); - shiftLongs(tmp, 2, longs, 12, 1, MASK8_1); - shiftLongs(tmp, 2, longs, 14, 0, MASK8_1); - } - - private static void decode2(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 4); - shiftLongs(tmp, 4, longs, 0, 6, MASK8_2); - shiftLongs(tmp, 4, longs, 4, 4, MASK8_2); - shiftLongs(tmp, 4, longs, 8, 2, MASK8_2); - shiftLongs(tmp, 4, longs, 12, 0, MASK8_2); - } - - private static void decode3(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 6); - shiftLongs(tmp, 6, longs, 0, 5, MASK8_3); - shiftLongs(tmp, 6, longs, 6, 2, MASK8_3); - for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 2; ++iter, tmpIdx += 3, longsIdx += 2) { - long l0 = (tmp[tmpIdx + 0] & MASK8_2) << 1; - l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 2; - l1 |= (tmp[tmpIdx + 2] & MASK8_2) << 0; - longs[longsIdx + 1] = l1; - } - } - - private static void decode4(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 8); - shiftLongs(tmp, 8, longs, 0, 4, MASK8_4); - shiftLongs(tmp, 8, longs, 8, 0, MASK8_4); - } - - private static void decode5(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 10); - shiftLongs(tmp, 10, longs, 0, 3, MASK8_5); - for (int iter = 0, tmpIdx = 0, longsIdx = 10; iter < 2; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK8_3) << 2; - l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 4; - l1 |= (tmp[tmpIdx + 2] & MASK8_3) << 1; - l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK8_1; - 
longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK8_2) << 3; - l2 |= (tmp[tmpIdx + 4] & MASK8_3) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode6(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 12); - shiftLongs(tmp, 12, longs, 0, 2, MASK8_6); - shiftLongs(tmp, 12, tmp, 0, 0, MASK8_2); - for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 4; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 4; - l0 |= tmp[tmpIdx + 1] << 2; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode7(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 14); - shiftLongs(tmp, 14, longs, 0, 1, MASK8_7); - shiftLongs(tmp, 14, tmp, 0, 0, MASK8_1); - for (int iter = 0, tmpIdx = 0, longsIdx = 14; iter < 2; ++iter, tmpIdx += 7, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 6; - l0 |= tmp[tmpIdx + 1] << 5; - l0 |= tmp[tmpIdx + 2] << 4; - l0 |= tmp[tmpIdx + 3] << 3; - l0 |= tmp[tmpIdx + 4] << 2; - l0 |= tmp[tmpIdx + 5] << 1; - l0 |= tmp[tmpIdx + 6] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode8(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(longs, 0, 16); - } - - private static void decode9(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 18); - shiftLongs(tmp, 18, longs, 0, 7, MASK16_9); - for (int iter = 0, tmpIdx = 0, longsIdx = 18; iter < 2; ++iter, tmpIdx += 9, longsIdx += 7) { - long l0 = (tmp[tmpIdx + 0] & MASK16_7) << 2; - l0 |= (tmp[tmpIdx + 1] >>> 5) & MASK16_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK16_5) << 4; - l1 |= (tmp[tmpIdx + 2] >>> 3) & MASK16_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK16_3) << 6; - l2 |= (tmp[tmpIdx + 3] >>> 1) & MASK16_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK16_1) << 8; - l3 |= (tmp[tmpIdx + 4] & MASK16_7) << 1; - l3 |= (tmp[tmpIdx + 
5] >>> 6) & MASK16_1; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK16_6) << 3; - l4 |= (tmp[tmpIdx + 6] >>> 4) & MASK16_3; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 6] & MASK16_4) << 5; - l5 |= (tmp[tmpIdx + 7] >>> 2) & MASK16_5; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 7] & MASK16_2) << 7; - l6 |= (tmp[tmpIdx + 8] & MASK16_7) << 0; - longs[longsIdx + 6] = l6; - } - } - - private static void decode10(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 20); - shiftLongs(tmp, 20, longs, 0, 6, MASK16_10); - for (int iter = 0, tmpIdx = 0, longsIdx = 20; iter < 4; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK16_6) << 4; - l0 |= (tmp[tmpIdx + 1] >>> 2) & MASK16_4; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK16_2) << 8; - l1 |= (tmp[tmpIdx + 2] & MASK16_6) << 2; - l1 |= (tmp[tmpIdx + 3] >>> 4) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK16_4) << 6; - l2 |= (tmp[tmpIdx + 4] & MASK16_6) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode11(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 22); - shiftLongs(tmp, 22, longs, 0, 5, MASK16_11); - for (int iter = 0, tmpIdx = 0, longsIdx = 22; iter < 2; ++iter, tmpIdx += 11, longsIdx += 5) { - long l0 = (tmp[tmpIdx + 0] & MASK16_5) << 6; - l0 |= (tmp[tmpIdx + 1] & MASK16_5) << 1; - l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK16_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK16_4) << 7; - l1 |= (tmp[tmpIdx + 3] & MASK16_5) << 2; - l1 |= (tmp[tmpIdx + 4] >>> 3) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 4] & MASK16_3) << 8; - l2 |= (tmp[tmpIdx + 5] & MASK16_5) << 3; - l2 |= (tmp[tmpIdx + 6] >>> 2) & MASK16_3; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 6] & MASK16_2) << 9; - l3 |= (tmp[tmpIdx + 7] & MASK16_5) << 4; - l3 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_4; - longs[longsIdx + 3] = l3; 
- long l4 = (tmp[tmpIdx + 8] & MASK16_1) << 10; - l4 |= (tmp[tmpIdx + 9] & MASK16_5) << 5; - l4 |= (tmp[tmpIdx + 10] & MASK16_5) << 0; - longs[longsIdx + 4] = l4; - } - } - - private static void decode12(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 24); - shiftLongs(tmp, 24, longs, 0, 4, MASK16_12); - shiftLongs(tmp, 24, tmp, 0, 0, MASK16_4); - for (int iter = 0, tmpIdx = 0, longsIdx = 24; iter < 8; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 8; - l0 |= tmp[tmpIdx + 1] << 4; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode13(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 26); - shiftLongs(tmp, 26, longs, 0, 3, MASK16_13); - for (int iter = 0, tmpIdx = 0, longsIdx = 26; iter < 2; ++iter, tmpIdx += 13, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK16_3) << 10; - l0 |= (tmp[tmpIdx + 1] & MASK16_3) << 7; - l0 |= (tmp[tmpIdx + 2] & MASK16_3) << 4; - l0 |= (tmp[tmpIdx + 3] & MASK16_3) << 1; - l0 |= (tmp[tmpIdx + 4] >>> 2) & MASK16_1; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 4] & MASK16_2) << 11; - l1 |= (tmp[tmpIdx + 5] & MASK16_3) << 8; - l1 |= (tmp[tmpIdx + 6] & MASK16_3) << 5; - l1 |= (tmp[tmpIdx + 7] & MASK16_3) << 2; - l1 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_2; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 8] & MASK16_1) << 12; - l2 |= (tmp[tmpIdx + 9] & MASK16_3) << 9; - l2 |= (tmp[tmpIdx + 10] & MASK16_3) << 6; - l2 |= (tmp[tmpIdx + 11] & MASK16_3) << 3; - l2 |= (tmp[tmpIdx + 12] & MASK16_3) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode14(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 28); - shiftLongs(tmp, 28, longs, 0, 2, MASK16_14); - shiftLongs(tmp, 28, tmp, 0, 0, MASK16_2); - for (int iter = 0, tmpIdx = 0, longsIdx = 28; iter < 4; ++iter, tmpIdx += 7, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 12; - l0 |= tmp[tmpIdx + 
1] << 10; - l0 |= tmp[tmpIdx + 2] << 8; - l0 |= tmp[tmpIdx + 3] << 6; - l0 |= tmp[tmpIdx + 4] << 4; - l0 |= tmp[tmpIdx + 5] << 2; - l0 |= tmp[tmpIdx + 6] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode15(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 30); - shiftLongs(tmp, 30, longs, 0, 1, MASK16_15); - shiftLongs(tmp, 30, tmp, 0, 0, MASK16_1); - for (int iter = 0, tmpIdx = 0, longsIdx = 30; iter < 2; ++iter, tmpIdx += 15, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 14; - l0 |= tmp[tmpIdx + 1] << 13; - l0 |= tmp[tmpIdx + 2] << 12; - l0 |= tmp[tmpIdx + 3] << 11; - l0 |= tmp[tmpIdx + 4] << 10; - l0 |= tmp[tmpIdx + 5] << 9; - l0 |= tmp[tmpIdx + 6] << 8; - l0 |= tmp[tmpIdx + 7] << 7; - l0 |= tmp[tmpIdx + 8] << 6; - l0 |= tmp[tmpIdx + 9] << 5; - l0 |= tmp[tmpIdx + 10] << 4; - l0 |= tmp[tmpIdx + 11] << 3; - l0 |= tmp[tmpIdx + 12] << 2; - l0 |= tmp[tmpIdx + 13] << 1; - l0 |= tmp[tmpIdx + 14] << 0; - longs[longsIdx + 0] = l0; - } - } - - private static void decode16(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(longs, 0, 32); - } - - private static void decode17(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 34); - shiftLongs(tmp, 34, longs, 0, 15, MASK32_17); - for (int iter = 0, tmpIdx = 0, longsIdx = 34; iter < 2; ++iter, tmpIdx += 17, longsIdx += 15) { - long l0 = (tmp[tmpIdx + 0] & MASK32_15) << 2; - l0 |= (tmp[tmpIdx + 1] >>> 13) & MASK32_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_13) << 4; - l1 |= (tmp[tmpIdx + 2] >>> 11) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_11) << 6; - l2 |= (tmp[tmpIdx + 3] >>> 9) & MASK32_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK32_9) << 8; - l3 |= (tmp[tmpIdx + 4] >>> 7) & MASK32_8; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 4] & MASK32_7) << 10; - l4 |= (tmp[tmpIdx + 5] >>> 5) & MASK32_10; - longs[longsIdx + 4] = 
l4; - long l5 = (tmp[tmpIdx + 5] & MASK32_5) << 12; - l5 |= (tmp[tmpIdx + 6] >>> 3) & MASK32_12; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 6] & MASK32_3) << 14; - l6 |= (tmp[tmpIdx + 7] >>> 1) & MASK32_14; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 7] & MASK32_1) << 16; - l7 |= (tmp[tmpIdx + 8] & MASK32_15) << 1; - l7 |= (tmp[tmpIdx + 9] >>> 14) & MASK32_1; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 9] & MASK32_14) << 3; - l8 |= (tmp[tmpIdx + 10] >>> 12) & MASK32_3; - longs[longsIdx + 8] = l8; - long l9 = (tmp[tmpIdx + 10] & MASK32_12) << 5; - l9 |= (tmp[tmpIdx + 11] >>> 10) & MASK32_5; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 11] & MASK32_10) << 7; - l10 |= (tmp[tmpIdx + 12] >>> 8) & MASK32_7; - longs[longsIdx + 10] = l10; - long l11 = (tmp[tmpIdx + 12] & MASK32_8) << 9; - l11 |= (tmp[tmpIdx + 13] >>> 6) & MASK32_9; - longs[longsIdx + 11] = l11; - long l12 = (tmp[tmpIdx + 13] & MASK32_6) << 11; - l12 |= (tmp[tmpIdx + 14] >>> 4) & MASK32_11; - longs[longsIdx + 12] = l12; - long l13 = (tmp[tmpIdx + 14] & MASK32_4) << 13; - l13 |= (tmp[tmpIdx + 15] >>> 2) & MASK32_13; - longs[longsIdx + 13] = l13; - long l14 = (tmp[tmpIdx + 15] & MASK32_2) << 15; - l14 |= (tmp[tmpIdx + 16] & MASK32_15) << 0; - longs[longsIdx + 14] = l14; - } - } - - private static void decode18(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 36); - shiftLongs(tmp, 36, longs, 0, 14, MASK32_18); - for (int iter = 0, tmpIdx = 0, longsIdx = 36; iter < 4; ++iter, tmpIdx += 9, longsIdx += 7) { - long l0 = (tmp[tmpIdx + 0] & MASK32_14) << 4; - l0 |= (tmp[tmpIdx + 1] >>> 10) & MASK32_4; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_10) << 8; - l1 |= (tmp[tmpIdx + 2] >>> 6) & MASK32_8; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_6) << 12; - l2 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_12; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 3] & MASK32_2) << 16; - l3 |= (tmp[tmpIdx + 4] & 
MASK32_14) << 2; - l3 |= (tmp[tmpIdx + 5] >>> 12) & MASK32_2; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK32_12) << 6; - l4 |= (tmp[tmpIdx + 6] >>> 8) & MASK32_6; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 6] & MASK32_8) << 10; - l5 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_10; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 7] & MASK32_4) << 14; - l6 |= (tmp[tmpIdx + 8] & MASK32_14) << 0; - longs[longsIdx + 6] = l6; - } - } - - private static void decode19(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 38); - shiftLongs(tmp, 38, longs, 0, 13, MASK32_19); - for (int iter = 0, tmpIdx = 0, longsIdx = 38; iter < 2; ++iter, tmpIdx += 19, longsIdx += 13) { - long l0 = (tmp[tmpIdx + 0] & MASK32_13) << 6; - l0 |= (tmp[tmpIdx + 1] >>> 7) & MASK32_6; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_7) << 12; - l1 |= (tmp[tmpIdx + 2] >>> 1) & MASK32_12; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 2] & MASK32_1) << 18; - l2 |= (tmp[tmpIdx + 3] & MASK32_13) << 5; - l2 |= (tmp[tmpIdx + 4] >>> 8) & MASK32_5; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 4] & MASK32_8) << 11; - l3 |= (tmp[tmpIdx + 5] >>> 2) & MASK32_11; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 5] & MASK32_2) << 17; - l4 |= (tmp[tmpIdx + 6] & MASK32_13) << 4; - l4 |= (tmp[tmpIdx + 7] >>> 9) & MASK32_4; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 7] & MASK32_9) << 10; - l5 |= (tmp[tmpIdx + 8] >>> 3) & MASK32_10; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 8] & MASK32_3) << 16; - l6 |= (tmp[tmpIdx + 9] & MASK32_13) << 3; - l6 |= (tmp[tmpIdx + 10] >>> 10) & MASK32_3; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 10] & MASK32_10) << 9; - l7 |= (tmp[tmpIdx + 11] >>> 4) & MASK32_9; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 11] & MASK32_4) << 15; - l8 |= (tmp[tmpIdx + 12] & MASK32_13) << 2; - l8 |= (tmp[tmpIdx + 13] >>> 11) & MASK32_2; - longs[longsIdx + 8] = l8; - 
long l9 = (tmp[tmpIdx + 13] & MASK32_11) << 8; - l9 |= (tmp[tmpIdx + 14] >>> 5) & MASK32_8; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 14] & MASK32_5) << 14; - l10 |= (tmp[tmpIdx + 15] & MASK32_13) << 1; - l10 |= (tmp[tmpIdx + 16] >>> 12) & MASK32_1; - longs[longsIdx + 10] = l10; - long l11 = (tmp[tmpIdx + 16] & MASK32_12) << 7; - l11 |= (tmp[tmpIdx + 17] >>> 6) & MASK32_7; - longs[longsIdx + 11] = l11; - long l12 = (tmp[tmpIdx + 17] & MASK32_6) << 13; - l12 |= (tmp[tmpIdx + 18] & MASK32_13) << 0; - longs[longsIdx + 12] = l12; - } - } - - private static void decode20(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 40); - shiftLongs(tmp, 40, longs, 0, 12, MASK32_20); - for (int iter = 0, tmpIdx = 0, longsIdx = 40; iter < 8; ++iter, tmpIdx += 5, longsIdx += 3) { - long l0 = (tmp[tmpIdx + 0] & MASK32_12) << 8; - l0 |= (tmp[tmpIdx + 1] >>> 4) & MASK32_8; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_4) << 16; - l1 |= (tmp[tmpIdx + 2] & MASK32_12) << 4; - l1 |= (tmp[tmpIdx + 3] >>> 8) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK32_8) << 12; - l2 |= (tmp[tmpIdx + 4] & MASK32_12) << 0; - longs[longsIdx + 2] = l2; - } - } - - private static void decode21(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 42); - shiftLongs(tmp, 42, longs, 0, 11, MASK32_21); - for (int iter = 0, tmpIdx = 0, longsIdx = 42; iter < 2; ++iter, tmpIdx += 21, longsIdx += 11) { - long l0 = (tmp[tmpIdx + 0] & MASK32_11) << 10; - l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK32_10; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 1] & MASK32_1) << 20; - l1 |= (tmp[tmpIdx + 2] & MASK32_11) << 9; - l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_9; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 3] & MASK32_2) << 19; - l2 |= (tmp[tmpIdx + 4] & MASK32_11) << 8; - l2 |= (tmp[tmpIdx + 5] >>> 3) & MASK32_8; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 5] & MASK32_3) << 18; - 
l3 |= (tmp[tmpIdx + 6] & MASK32_11) << 7; - l3 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_7; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 7] & MASK32_4) << 17; - l4 |= (tmp[tmpIdx + 8] & MASK32_11) << 6; - l4 |= (tmp[tmpIdx + 9] >>> 5) & MASK32_6; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 9] & MASK32_5) << 16; - l5 |= (tmp[tmpIdx + 10] & MASK32_11) << 5; - l5 |= (tmp[tmpIdx + 11] >>> 6) & MASK32_5; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 11] & MASK32_6) << 15; - l6 |= (tmp[tmpIdx + 12] & MASK32_11) << 4; - l6 |= (tmp[tmpIdx + 13] >>> 7) & MASK32_4; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 13] & MASK32_7) << 14; - l7 |= (tmp[tmpIdx + 14] & MASK32_11) << 3; - l7 |= (tmp[tmpIdx + 15] >>> 8) & MASK32_3; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 15] & MASK32_8) << 13; - l8 |= (tmp[tmpIdx + 16] & MASK32_11) << 2; - l8 |= (tmp[tmpIdx + 17] >>> 9) & MASK32_2; - longs[longsIdx + 8] = l8; - long l9 = (tmp[tmpIdx + 17] & MASK32_9) << 12; - l9 |= (tmp[tmpIdx + 18] & MASK32_11) << 1; - l9 |= (tmp[tmpIdx + 19] >>> 10) & MASK32_1; - longs[longsIdx + 9] = l9; - long l10 = (tmp[tmpIdx + 19] & MASK32_10) << 11; - l10 |= (tmp[tmpIdx + 20] & MASK32_11) << 0; - longs[longsIdx + 10] = l10; - } - } - - private static void decode22(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 44); - shiftLongs(tmp, 44, longs, 0, 10, MASK32_22); - for (int iter = 0, tmpIdx = 0, longsIdx = 44; iter < 4; ++iter, tmpIdx += 11, longsIdx += 5) { - long l0 = (tmp[tmpIdx + 0] & MASK32_10) << 12; - l0 |= (tmp[tmpIdx + 1] & MASK32_10) << 2; - l0 |= (tmp[tmpIdx + 2] >>> 8) & MASK32_2; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK32_8) << 14; - l1 |= (tmp[tmpIdx + 3] & MASK32_10) << 4; - l1 |= (tmp[tmpIdx + 4] >>> 6) & MASK32_4; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 4] & MASK32_6) << 16; - l2 |= (tmp[tmpIdx + 5] & MASK32_10) << 6; - l2 |= (tmp[tmpIdx + 6] >>> 4) & MASK32_6; - longs[longsIdx + 
2] = l2; - long l3 = (tmp[tmpIdx + 6] & MASK32_4) << 18; - l3 |= (tmp[tmpIdx + 7] & MASK32_10) << 8; - l3 |= (tmp[tmpIdx + 8] >>> 2) & MASK32_8; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 8] & MASK32_2) << 20; - l4 |= (tmp[tmpIdx + 9] & MASK32_10) << 10; - l4 |= (tmp[tmpIdx + 10] & MASK32_10) << 0; - longs[longsIdx + 4] = l4; - } - } - - private static void decode23(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 46); - shiftLongs(tmp, 46, longs, 0, 9, MASK32_23); - for (int iter = 0, tmpIdx = 0, longsIdx = 46; iter < 2; ++iter, tmpIdx += 23, longsIdx += 9) { - long l0 = (tmp[tmpIdx + 0] & MASK32_9) << 14; - l0 |= (tmp[tmpIdx + 1] & MASK32_9) << 5; - l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK32_5; - longs[longsIdx + 0] = l0; - long l1 = (tmp[tmpIdx + 2] & MASK32_4) << 19; - l1 |= (tmp[tmpIdx + 3] & MASK32_9) << 10; - l1 |= (tmp[tmpIdx + 4] & MASK32_9) << 1; - l1 |= (tmp[tmpIdx + 5] >>> 8) & MASK32_1; - longs[longsIdx + 1] = l1; - long l2 = (tmp[tmpIdx + 5] & MASK32_8) << 15; - l2 |= (tmp[tmpIdx + 6] & MASK32_9) << 6; - l2 |= (tmp[tmpIdx + 7] >>> 3) & MASK32_6; - longs[longsIdx + 2] = l2; - long l3 = (tmp[tmpIdx + 7] & MASK32_3) << 20; - l3 |= (tmp[tmpIdx + 8] & MASK32_9) << 11; - l3 |= (tmp[tmpIdx + 9] & MASK32_9) << 2; - l3 |= (tmp[tmpIdx + 10] >>> 7) & MASK32_2; - longs[longsIdx + 3] = l3; - long l4 = (tmp[tmpIdx + 10] & MASK32_7) << 16; - l4 |= (tmp[tmpIdx + 11] & MASK32_9) << 7; - l4 |= (tmp[tmpIdx + 12] >>> 2) & MASK32_7; - longs[longsIdx + 4] = l4; - long l5 = (tmp[tmpIdx + 12] & MASK32_2) << 21; - l5 |= (tmp[tmpIdx + 13] & MASK32_9) << 12; - l5 |= (tmp[tmpIdx + 14] & MASK32_9) << 3; - l5 |= (tmp[tmpIdx + 15] >>> 6) & MASK32_3; - longs[longsIdx + 5] = l5; - long l6 = (tmp[tmpIdx + 15] & MASK32_6) << 17; - l6 |= (tmp[tmpIdx + 16] & MASK32_9) << 8; - l6 |= (tmp[tmpIdx + 17] >>> 1) & MASK32_8; - longs[longsIdx + 6] = l6; - long l7 = (tmp[tmpIdx + 17] & MASK32_1) << 22; - l7 |= (tmp[tmpIdx + 18] & MASK32_9) << 13; - l7 |= 
(tmp[tmpIdx + 19] & MASK32_9) << 4; - l7 |= (tmp[tmpIdx + 20] >>> 5) & MASK32_4; - longs[longsIdx + 7] = l7; - long l8 = (tmp[tmpIdx + 20] & MASK32_5) << 18; - l8 |= (tmp[tmpIdx + 21] & MASK32_9) << 9; - l8 |= (tmp[tmpIdx + 22] & MASK32_9) << 0; - longs[longsIdx + 8] = l8; - } - } - - private static void decode24(DataInput in, long[] tmp, long[] longs) throws IOException { - in.readLongs(tmp, 0, 48); - shiftLongs(tmp, 48, longs, 0, 8, MASK32_24); - shiftLongs(tmp, 48, tmp, 0, 0, MASK32_8); - for (int iter = 0, tmpIdx = 0, longsIdx = 48; iter < 16; ++iter, tmpIdx += 3, longsIdx += 1) { - long l0 = tmp[tmpIdx + 0] << 16; - l0 |= tmp[tmpIdx + 1] << 8; - l0 |= tmp[tmpIdx + 2] << 0; - longs[longsIdx + 0] = l0; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java index 26a600c73eeb5..46ab0b0d00671 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java @@ -23,6 +23,7 @@ import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.LongHeap; import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java index 873dcc9b87207..303c66309a23f 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtil.java @@ -11,6 +11,7 @@ import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; diff --git 
a/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java rename to server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java index 14e8d3344c3dc..5d9052203c5f4 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/ForUtilTests.java @@ -17,7 +17,7 @@ * * Modifications copyright (C) 2022 Elasticsearch B.V. */ -package org.elasticsearch.index.codec.postings; +package org.elasticsearch.index.codec; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java index ec8308404a118..f766d2148a96e 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/DocValuesForUtilTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.index.codec.ForUtil; import java.io.IOException; import java.util.Arrays; From 42fae6bf4ccb9c168d0f6b55c03eff06acf1b9cc Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Mon, 25 Mar 2024 11:14:49 +0000 Subject: [PATCH 150/214] Skip logging when no existing autosharding event (#106711) This skips rather confusing logging like ``` Rolling over data stream [logs-mysql.error-default] using existing auto-sharding recommendation [null] ``` Will only log this statement when there actually is an active auto sharding event that's being used in the rollover process. 
--- .../rollover/MetadataRolloverService.java | 26 ++++++------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index 6645de880ad86..b03353a11793f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -292,12 +292,14 @@ private RolloverResult rolloverDataStream( DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null ? dataStream.getAutoShardingEvent() : switch (autoShardingResult.type()) { - case NO_CHANGE_REQUIRED -> { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() - ); + case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { + if (dataStream.getAutoShardingEvent() != null) { + logger.info( + "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", + dataStreamName, + dataStream.getAutoShardingEvent() + ); + } yield dataStream.getAutoShardingEvent(); } case INCREASE_SHARDS, DECREASE_SHARDS -> { @@ -308,18 +310,6 @@ yield new DataStreamAutoShardingEvent( now.toEpochMilli() ); } - case COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { - // we're in the cooldown period for this particular recommendation so perhaps use a previous autosharding - // recommendation (or the value configured in the backing index template otherwise) - if (dataStream.getAutoShardingEvent() != null) { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() - ); - } - yield dataStream.getAutoShardingEvent(); - } // data 
sharding might not be available due to the feature not being available/enabled or due to cluster level excludes // being configured. the index template will dictate the number of shards as usual case NOT_APPLICABLE -> { From c15f727f2d86cc82febc4e3efce9d45b8e53f2c0 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 25 Mar 2024 12:16:18 +0100 Subject: [PATCH 151/214] Update Gradle wrapper to 8.7 (#105854) removed tiny issue on build script that seems like an oversight and doesn't do anything at all --- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../src/main/resources/minimumGradleVersion | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 43462 -> 43453 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- x-pack/plugin/ent-search/build.gradle | 2 -- 6 files changed, 7 insertions(+), 9 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index f043ef362390f..631c6d36a93a4 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ 
b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.6 \ No newline at end of file +8.7 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index d64cd4917707c1f8861d8cb53dd15194d4248596..e6441136f3d4ba8a0da8d277868979cfbc8ad796 100644 GIT binary patch delta 34118 zcmY(qRX`kF)3u#IAjsf0xCD212@LM;?(PINyAue(f;$XO2=4Cg1P$=#e%|lo zKk1`B>Q#GH)wNd-&cJofz}3=WfYndTeo)CyX{fOHsQjGa<{e=jamMNwjdatD={CN3>GNchOE9OGPIqr)3v>RcKWR3Z zF-guIMjE2UF0Wqk1)21791y#}ciBI*bAenY*BMW_)AeSuM5}vz_~`+1i!Lo?XAEq{TlK5-efNFgHr6o zD>^vB&%3ZGEWMS>`?tu!@66|uiDvS5`?bF=gIq3rkK(j<_TybyoaDHg8;Y#`;>tXI z=tXo~e9{U!*hqTe#nZjW4z0mP8A9UUv1}C#R*@yu9G3k;`Me0-BA2&Aw6f`{Ozan2 z8c8Cs#dA-7V)ZwcGKH}jW!Ja&VaUc@mu5a@CObzNot?b{f+~+212lwF;!QKI16FDS zodx>XN$sk9;t;)maB^s6sr^L32EbMV(uvW%or=|0@U6cUkE`_!<=LHLlRGJx@gQI=B(nn z-GEjDE}*8>3U$n(t^(b^C$qSTI;}6q&ypp?-2rGpqg7b}pyT zOARu2x>0HB{&D(d3sp`+}ka+Pca5glh|c=M)Ujn_$ly^X6&u z%Q4Y*LtB_>i6(YR!?{Os-(^J`(70lZ&Hp1I^?t@~SFL1!m0x6j|NM!-JTDk)%Q^R< z@e?23FD&9_W{Bgtr&CG&*Oer3Z(Bu2EbV3T9FeQ|-vo5pwzwQ%g&=zFS7b{n6T2ZQ z*!H(=z<{D9@c`KmHO&DbUIzpg`+r5207}4D=_P$ONIc5lsFgn)UB-oUE#{r+|uHc^hzv_df zV`n8&qry%jXQ33}Bjqcim~BY1?KZ}x453Oh7G@fA(}+m(f$)TY%7n=MeLi{jJ7LMB zt(mE*vFnep?YpkT_&WPV9*f>uSi#n#@STJmV&SLZnlLsWYI@y+Bs=gzcqche=&cBH2WL)dkR!a95*Ri)JH_4c*- zl4pPLl^as5_y&6RDE@@7342DNyF&GLJez#eMJjI}#pZN{Y8io{l*D+|f_Y&RQPia@ zNDL;SBERA|B#cjlNC@VU{2csOvB8$HzU$01Q?y)KEfos>W46VMh>P~oQC8k=26-Ku)@C|n^zDP!hO}Y z_tF}0@*Ds!JMt>?4y|l3?`v#5*oV-=vL7}zehMON^=s1%q+n=^^Z{^mTs7}*->#YL z)x-~SWE{e?YCarwU$=cS>VzmUh?Q&7?#Xrcce+jeZ|%0!l|H_=D_`77hBfd4Zqk&! 
zq-Dnt_?5*$Wsw8zGd@?woEtfYZ2|9L8b>TO6>oMh%`B7iBb)-aCefM~q|S2Cc0t9T zlu-ZXmM0wd$!gd-dTtik{bqyx32%f;`XUvbUWWJmpHfk8^PQIEsByJm+@+-aj4J#D z4#Br3pO6z1eIC>X^yKk|PeVwX_4B+IYJyJyc3B`4 zPrM#raacGIzVOexcVB;fcsxS=s1e&V;Xe$tw&KQ`YaCkHTKe*Al#velxV{3wxx}`7@isG zp6{+s)CG%HF#JBAQ_jM%zCX5X;J%-*%&jVI?6KpYyzGbq7qf;&hFprh?E5Wyo=bZ) z8YNycvMNGp1836!-?nihm6jI`^C`EeGryoNZO1AFTQhzFJOA%Q{X(sMYlzABt!&f{ zoDENSuoJQIg5Q#@BUsNJX2h>jkdx4<+ipUymWKFr;w+s>$laIIkfP6nU}r+?J9bZg zUIxz>RX$kX=C4m(zh-Eg$BsJ4OL&_J38PbHW&7JmR27%efAkqqdvf)Am)VF$+U3WR z-E#I9H6^)zHLKCs7|Zs<7Bo9VCS3@CDQ;{UTczoEprCKL3ZZW!ffmZFkcWU-V|_M2 zUA9~8tE9<5`59W-UgUmDFp11YlORl3mS3*2#ZHjv{*-1#uMV_oVTy{PY(}AqZv#wF zJVks)%N6LaHF$$<6p8S8Lqn+5&t}DmLKiC~lE{jPZ39oj{wR&fe*LX-z0m}9ZnZ{U z>3-5Bh{KKN^n5i!M79Aw5eY=`6fG#aW1_ZG;fw7JM69qk^*(rmO{|Z6rXy?l=K=#_ zE-zd*P|(sskasO(cZ5L~_{Mz&Y@@@Q)5_8l<6vB$@226O+pDvkFaK8b>%2 zfMtgJ@+cN@w>3)(_uR;s8$sGONbYvoEZ3-)zZk4!`tNzd<0lwt{RAgplo*f@Z)uO` zzd`ljSqKfHJOLxya4_}T`k5Ok1Mpo#MSqf~&ia3uIy{zyuaF}pV6 z)@$ZG5LYh8Gge*LqM_|GiT1*J*uKes=Oku_gMj&;FS`*sfpM+ygN&yOla-^WtIU#$ zuw(_-?DS?6DY7IbON7J)p^IM?N>7x^3)(7wR4PZJu(teex%l>zKAUSNL@~{czc}bR z)I{XzXqZBU3a;7UQ~PvAx8g-3q-9AEd}1JrlfS8NdPc+!=HJ6Bs( zCG!0;e0z-22(Uzw>hkEmC&xj?{0p|kc zM}MMXCF%RLLa#5jG`+}{pDL3M&|%3BlwOi?dq!)KUdv5__zR>u^o|QkYiqr(m3HxF z6J*DyN#Jpooc$ok=b7{UAVM@nwGsr6kozSddwulf5g1{B=0#2)zv!zLXQup^BZ4sv*sEsn)+MA?t zEL)}3*R?4(J~CpeSJPM!oZ~8;8s_=@6o`IA%{aEA9!GELRvOuncE`s7sH91 zmF=+T!Q6%){?lJn3`5}oW31(^Of|$r%`~gT{eimT7R~*Mg@x+tWM3KE>=Q>nkMG$U za7r>Yz2LEaA|PsMafvJ(Y>Xzha?=>#B!sYfVob4k5Orb$INFdL@U0(J8Hj&kgWUlO zPm+R07E+oq^4f4#HvEPANGWLL_!uF{nkHYE&BCH%l1FL_r(Nj@M)*VOD5S42Gk-yT z^23oAMvpA57H(fkDGMx86Z}rtQhR^L!T2iS!788E z+^${W1V}J_NwdwdxpXAW8}#6o1(Uu|vhJvubFvQIH1bDl4J4iDJ+181KuDuHwvM?` z%1@Tnq+7>p{O&p=@QT}4wT;HCb@i)&7int<0#bj8j0sfN3s6|a(l7Bj#7$hxX@~iP z1HF8RFH}irky&eCN4T94VyKqGywEGY{Gt0Xl-`|dOU&{Q;Ao;sL>C6N zXx1y^RZSaL-pG|JN;j9ADjo^XR}gce#seM4QB1?S`L*aB&QlbBIRegMnTkTCks7JU z<0(b+^Q?HN1&$M1l&I@>HMS;!&bb()a}hhJzsmB?I`poqTrSoO>m_JE5U4=?o;OV6 
zBZjt;*%1P>%2{UL=;a4(aI>PRk|mr&F^=v6Fr&xMj8fRCXE5Z2qdre&;$_RNid5!S zm^XiLK25G6_j4dWkFqjtU7#s;b8h?BYFxV?OE?c~&ME`n`$ix_`mb^AWr+{M9{^^Rl;~KREplwy2q;&xe zUR0SjHzKVYzuqQ84w$NKVPGVHL_4I)Uw<$uL2-Ml#+5r2X{LLqc*p13{;w#E*Kwb*1D|v?e;(<>vl@VjnFB^^Y;;b3 z=R@(uRj6D}-h6CCOxAdqn~_SG=bN%^9(Ac?zfRkO5x2VM0+@_qk?MDXvf=@q_* z3IM@)er6-OXyE1Z4sU3{8$Y$>8NcnU-nkyWD&2ZaqX1JF_JYL8y}>@V8A5%lX#U3E zet5PJM`z79q9u5v(OE~{by|Jzlw2<0h`hKpOefhw=fgLTY9M8h+?37k@TWpzAb2Fc zQMf^aVf!yXlK?@5d-re}!fuAWu0t57ZKSSacwRGJ$0uC}ZgxCTw>cjRk*xCt%w&hh zoeiIgdz__&u~8s|_TZsGvJ7sjvBW<(C@}Y%#l_ID2&C`0;Eg2Z+pk;IK}4T@W6X5H z`s?ayU-iF+aNr5--T-^~K~p;}D(*GWOAYDV9JEw!w8ZYzS3;W6*_`#aZw&9J ziXhBKU3~zd$kKzCAP-=t&cFDeQR*_e*(excIUxKuD@;-twSlP6>wWQU)$|H3Cy+`= z-#7OW!ZlYzZxkdQpfqVDFU3V2B_-eJS)Fi{fLtRz!K{~7TR~XilNCu=Z;{GIf9KYz zf3h=Jo+1#_s>z$lc~e)l93h&RqW1VHYN;Yjwg#Qi0yzjN^M4cuL>Ew`_-_wRhi*!f zLK6vTpgo^Bz?8AsU%#n}^EGigkG3FXen3M;hm#C38P@Zs4{!QZPAU=m7ZV&xKI_HWNt90Ef zxClm)ZY?S|n**2cNYy-xBlLAVZ=~+!|7y`(fh+M$#4zl&T^gV8ZaG(RBD!`3?9xcK zp2+aD(T%QIgrLx5au&TjG1AazI;`8m{K7^!@m>uGCSR;Ut{&?t%3AsF{>0Cm(Kf)2 z?4?|J+!BUg*P~C{?mwPQ#)gDMmro20YVNsVx5oWQMkzQ? 
zsQ%Y>%7_wkJqnSMuZjB9lBM(o zWut|B7w48cn}4buUBbdPBW_J@H7g=szrKEpb|aE>!4rLm+sO9K%iI75y~2HkUo^iw zJ3se$8$|W>3}?JU@3h@M^HEFNmvCp|+$-0M?RQ8SMoZ@38%!tz8f8-Ptb@106heiJ z^Bx!`0=Im z1!NUhO=9ICM*+||b3a7w*Y#5*Q}K^ar+oMMtekF0JnO>hzHqZKH0&PZ^^M(j;vwf_ z@^|VMBpcw8;4E-9J{(u7sHSyZpQbS&N{VQ%ZCh{c1UA5;?R} z+52*X_tkDQ(s~#-6`z4|Y}3N#a&dgP4S_^tsV=oZr4A1 zaSoPN1czE(UIBrC_r$0HM?RyBGe#lTBL4~JW#A`P^#0wuK)C-2$B6TvMi@@%K@JAT_IB^T7Zfqc8?{wHcSVG_?{(wUG%zhCm=%qP~EqeqKI$9UivF zv+5IUOs|%@ypo6b+i=xsZ=^G1yeWe)z6IX-EC`F=(|_GCNbHbNp(CZ*lpSu5n`FRA zhnrc4w+Vh?r>her@Ba_jv0Omp#-H7avZb=j_A~B%V0&FNi#!S8cwn0(Gg-Gi_LMI{ zCg=g@m{W@u?GQ|yp^yENd;M=W2s-k7Gw2Z(tsD5fTGF{iZ%Ccgjy6O!AB4x z%&=6jB7^}pyftW2YQpOY1w@%wZy%}-l0qJlOSKZXnN2wo3|hujU+-U~blRF!^;Tan z0w;Srh0|Q~6*tXf!5-rCD)OYE(%S|^WTpa1KHtpHZ{!;KdcM^#g8Z^+LkbiBHt85m z;2xv#83lWB(kplfgqv@ZNDcHizwi4-8+WHA$U-HBNqsZ`hKcUI3zV3d1ngJP-AMRET*A{> zb2A>Fk|L|WYV;Eu4>{a6ESi2r3aZL7x}eRc?cf|~bP)6b7%BnsR{Sa>K^0obn?yiJ zCVvaZ&;d_6WEk${F1SN0{_`(#TuOOH1as&#&xN~+JDzX(D-WU_nLEI}T_VaeLA=bc zl_UZS$nu#C1yH}YV>N2^9^zye{rDrn(rS99>Fh&jtNY7PP15q%g=RGnxACdCov47= zwf^9zfJaL{y`R#~tvVL#*<`=`Qe zj_@Me$6sIK=LMFbBrJps7vdaf_HeX?eC+P^{AgSvbEn?n<}NDWiQGQG4^ZOc|GskK z$Ve2_n8gQ-KZ=s(f`_X!+vM5)4+QmOP()2Fe#IL2toZBf+)8gTVgDSTN1CkP<}!j7 z0SEl>PBg{MnPHkj4wj$mZ?m5x!1ePVEYI(L_sb0OZ*=M%yQb?L{UL(2_*CTVbRxBe z@{)COwTK1}!*CK0Vi4~AB;HF(MmQf|dsoy(eiQ>WTKcEQlnKOri5xYsqi61Y=I4kzAjn5~{IWrz_l))|Ls zvq7xgQs?Xx@`N?f7+3XKLyD~6DRJw*uj*j?yvT3}a;(j_?YOe%hUFcPGWRVBXzpMJ zM43g6DLFqS9tcTLSg=^&N-y0dXL816v&-nqC0iXdg7kV|PY+js`F8dm z2PuHw&k+8*&9SPQ6f!^5q0&AH(i+z3I7a?8O+S5`g)>}fG|BM&ZnmL;rk)|u{1!aZ zEZHpAMmK_v$GbrrWNP|^2^s*!0waLW=-h5PZa-4jWYUt(Hr@EA(m3Mc3^uDxwt-me^55FMA9^>hpp26MhqjLg#^Y7OIJ5%ZLdNx&uDgIIqc zZRZl|n6TyV)0^DDyVtw*jlWkDY&Gw4q;k!UwqSL6&sW$B*5Rc?&)dt29bDB*b6IBY z6SY6Unsf6AOQdEf=P1inu6(6hVZ0~v-<>;LAlcQ2u?wRWj5VczBT$Op#8IhppP-1t zfz5H59Aa~yh7EN;BXJsLyjkjqARS5iIhDVPj<=4AJb}m6M@n{xYj3qsR*Q8;hVxDyC4vLI;;?^eENOb5QARj#nII5l$MtBCI@5u~(ylFi$ zw6-+$$XQ}Ca>FWT>q{k)g{Ml(Yv=6aDfe?m|5|kbGtWS}fKWI+})F6`x@||0oJ^(g|+xi 
zqlPdy5;`g*i*C=Q(aGeDw!eQg&w>UUj^{o?PrlFI=34qAU2u@BgwrBiaM8zoDTFJ< zh7nWpv>dr?q;4ZA?}V}|7qWz4W?6#S&m>hs4IwvCBe@-C>+oohsQZ^JC*RfDRm!?y zS4$7oxcI|##ga*y5hV>J4a%HHl^t$pjY%caL%-FlRb<$A$E!ws?8hf0@(4HdgQ!@> zds{&g$ocr9W4I84TMa9-(&^_B*&R%^=@?Ntxi|Ejnh;z=!|uVj&3fiTngDPg=0=P2 zB)3#%HetD84ayj??qrxsd9nqrBem(8^_u_UY{1@R_vK-0H9N7lBX5K(^O2=0#TtUUGSz{ z%g>qU8#a$DyZ~EMa|8*@`GOhCW3%DN%xuS91T7~iXRr)SG`%=Lfu%U~Z_`1b=lSi?qpD4$vLh$?HU6t0MydaowUpb zQr{>_${AMesCEffZo`}K0^~x>RY_ZIG{(r39MP>@=aiM@C;K)jUcfQV8#?SDvq>9D zI{XeKM%$$XP5`7p3K0T}x;qn)VMo>2t}Ib(6zui;k}<<~KibAb%p)**e>ln<=qyWU zrRDy|UXFi9y~PdEFIAXejLA{K)6<)Q`?;Q5!KsuEw({!#Rl8*5_F{TP?u|5(Hijv( ztAA^I5+$A*+*e0V0R~fc{ET-RAS3suZ}TRk3r)xqj~g_hxB`qIK5z(5wxYboz%46G zq{izIz^5xW1Vq#%lhXaZL&)FJWp0VZNO%2&ADd?+J%K$fM#T_Eke1{dQsx48dUPUY zLS+DWMJeUSjYL453f@HpRGU6Dv)rw+-c6xB>(=p4U%}_p>z^I@Ow9`nkUG21?cMIh9}hN?R-d)*6%pr6d@mcb*ixr7 z)>Lo<&2F}~>WT1ybm^9UO{6P9;m+fU^06_$o9gBWL9_}EMZFD=rLJ~&e?fhDnJNBI zKM=-WR6g7HY5tHf=V~6~QIQ~rakNvcsamU8m28YE=z8+G7K=h%)l6k zmCpiDInKL6*e#)#Pt;ANmjf`8h-nEt&d}(SBZMI_A{BI#ck-_V7nx)K9_D9K-p@?Zh81#b@{wS?wCcJ%og)8RF*-0z+~)6f#T` zWqF7_CBcnn=S-1QykC*F0YTsKMVG49BuKQBH%WuDkEy%E?*x&tt%0m>>5^HCOq|ux zuvFB)JPR-W|%$24eEC^AtG3Gp4qdK%pjRijF5Sg3X}uaKEE z-L5p5aVR!NTM8T`4|2QA@hXiLXRcJveWZ%YeFfV%mO5q#($TJ`*U>hicS+CMj%Ip# zivoL;dd*araeJK9EA<(tihD50FHWbITBgF9E<33A+eMr2;cgI3Gg6<-2o|_g9|> zv5}i932( zYfTE9?4#nQhP@a|zm#9FST2 z!y+p3B;p>KkUzH!K;GkBW}bWssz)9b>Ulg^)EDca;jDl+q=243BddS$hY^fC6lbpM z(q_bo4V8~eVeA?0LFD6ZtKcmOH^75#q$Eo%a&qvE8Zsqg=$p}u^|>DSWUP5i{6)LAYF4E2DfGZuMJ zMwxxmkxQf}Q$V3&2w|$`9_SQS^2NVbTHh;atB>=A%!}k-f4*i$X8m}Ni^ppZXk5_oYF>Gq(& z0wy{LjJOu}69}~#UFPc;$7ka+=gl(FZCy4xEsk);+he>Nnl>hb5Ud-lj!CNicgd^2 z_Qgr_-&S7*#nLAI7r()P$`x~fy)+y=W~6aNh_humoZr7MWGSWJPLk}$#w_1n%(@? 
z3FnHf1lbxKJbQ9c&i<$(wd{tUTX6DAKs@cXIOBv~!9i{wD@*|kwfX~sjKASrNFGvN zrFc=!0Bb^OhR2f`%hrp2ibv#KUxl)Np1aixD9{^o=)*U%n%rTHX?FSWL^UGpHpY@7 z74U}KoIRwxI#>)Pn4($A`nw1%-D}`sGRZD8Z#lF$6 zOeA5)+W2qvA%m^|$WluUU-O+KtMqd;Pd58?qZj})MbxYGO<{z9U&t4D{S2G>e+J9K ztFZ?}ya>SVOLp9hpW)}G%kTrg*KXXXsLkGdgHb+R-ZXqdkdQC0_)`?6mqo8(EU#d( zy;u&aVPe6C=YgCRPV!mJ6R6kdY*`e+VGM~`VtC>{k27!9vAZT)x2~AiX5|m1Rq}_= z;A9LX^nd$l-9&2%4s~p5r6ad-siV`HtxKF}l&xGSYJmP=z!?Mlwmwef$EQq~7;#OE z)U5eS6dB~~1pkj#9(}T3j!((8Uf%!W49FfUAozijoxInUE7z`~U3Y^}xc3xp){#9D z<^Tz2xw}@o@fdUZ@hnW#dX6gDOj4R8dV}Dw`u!h@*K)-NrxT8%2`T}EvOImNF_N1S zy?uo6_ZS>Qga4Xme3j#aX+1qdFFE{NT0Wfusa$^;eL5xGE_66!5_N8!Z~jCAH2=${ z*goHjl|z|kbmIE{cl-PloSTtD+2=CDm~ZHRgXJ8~1(g4W=1c3=2eF#3tah7ho`zm4 z05P&?nyqq$nC?iJ-nK_iBo=u5l#|Ka3H7{UZ&O`~t-=triw=SE7ynzMAE{Mv-{7E_ zViZtA(0^wD{iCCcg@c{54Ro@U5p1QZq_XlEGtdBAQ9@nT?(zLO0#)q55G8_Ug~Xnu zR-^1~hp|cy&52iogG@o?-^AD8Jb^;@&Ea5jEicDlze6%>?u$-eE};bQ`T6@(bED0J zKYtdc?%9*<<$2LCBzVx9CA4YV|q-qg*-{yQ;|0=KIgI6~z0DKTtajw2Oms3L zn{C%{P`duw!(F@*P)lFy11|Z&x`E2<=$Ln38>UR~z6~za(3r;45kQK_^QTX%!s zNzoIFFH8|Y>YVrUL5#mgA-Jh>j7)n)5}iVM4%_@^GSwEIBA2g-;43* z*)i7u*xc8jo2z8&=8t7qo|B-rsGw)b8UXnu`RgE4u!(J8yIJi(5m3~aYsADcfZ!GG zzqa7p=sg`V_KjiqI*LA-=T;uiNRB;BZZ)~88 z`C%p8%hIev2rxS12@doqsrjgMg3{A&N8A?%Ui5vSHh7!iC^ltF&HqG~;=16=h0{ygy^@HxixUb1XYcR36SB}}o3nxu z_IpEmGh_CK<+sUh@2zbK9MqO!S5cao=8LSQg0Zv4?ju%ww^mvc0WU$q@!oo#2bv24 z+?c}14L2vlDn%Y0!t*z=$*a!`*|uAVu&NO!z_arim$=btpUPR5XGCG0U3YU`v>yMr z^zmTdcEa!APX zYF>^Q-TP11;{VgtMqC}7>B^2gN-3KYl33gS-p%f!X<_Hr?`rG8{jb9jmuQA9U;BeG zHj6Pk(UB5c6zwX%SNi*Py*)gk^?+729$bAN-EUd*RKN7{CM4`Q65a1qF*-QWACA&m zrT)B(M}yih{2r!Tiv5Y&O&=H_OtaHUz96Npo_k0eN|!*s2mLe!Zkuv>^E8Xa43ZwH zOI058AZznYGrRJ+`*GmZzMi6yliFmGMge6^j?|PN%ARns!Eg$ufpcLc#1Ns!1@1 zvC7N8M$mRgnixwEtX{ypBS^n`k@t2cCh#_6L6WtQb8E~*Vu+Rr)YsKZRX~hzLG*BE zaeU#LPo?RLm(Wzltk79Jd1Y$|6aWz1)wf1K1RtqS;qyQMy@H@B805vQ%wfSJB?m&&=^m4i* zYVH`zTTFbFtNFkAI`Khe4e^CdGZw;O0 zqkQe2|NG_y6D%h(|EZNf&77_!NU%0y={^E=*gKGQ=)LdKPM3zUlM@otH2X07Awv8o 
zY8Y7a1^&Yy%b%m{mNQ5sWNMTIq96Wtr>a(hL>Qi&F(ckgKkyvM0IH<_}v~Fv-GqDapig=3*ZMOx!%cYY)SKzo7ECyem z9Mj3C)tCYM?C9YIlt1?zTJXNOo&oVxu&uXKJs7i+j8p*Qvu2PAnY}b`KStdpi`trk ztAO}T8eOC%x)mu+4ps8sYZ=vYJp16SVWEEgQyFKSfWQ@O5id6GfL`|2<}hMXLPszS zgK>NWOoR zBRyKeUPevpqKKShD|MZ`R;~#PdNMB3LWjqFKNvH9k+;(`;-pyXM55?qaji#nl~K8m z_MifoM*W*X9CQiXAOH{cZcP0;Bn10E1)T@62Um>et2ci!J2$5-_HPy(AGif+BJpJ^ ziHWynC_%-NlrFY+(f7HyVvbDIM$5ci_i3?22ZkF>Y8RPBhgx-7k3M2>6m5R24C|~I z&RPh9xpMGzhN4bii*ryWaN^d(`0 zTOADlU)g`1p+SVMNLztd)c+;XjXox(VHQwqzu>FROvf0`s&|NEv26}(TAe;@=FpZq zaVs6mp>W0rM3Qg*6x5f_bPJd!6dQGmh?&v0rpBNfS$DW-{4L7#_~-eA@7<2BsZV=X zow){3aATmLZOQrs>uzDkXOD=IiX;Ue*B(^4RF%H zeaZ^*MWn4tBDj(wj114r(`)P96EHq4th-;tWiHhkp2rDlrklX}I@ib-nel0slFoQO zOeTc;Rh7sMIebO`1%u)=GlEj+7HU;c|Nj>2j)J-kpR)s3#+9AiB zd$hAk6;3pu9(GCR#)#>aCGPYq%r&i02$0L9=7AlIGYdlUO5%eH&M!ZWD&6^NBAj0Y9ZDcPg@r@8Y&-}e!aq0S(`}NuQ({;aigCPnq75U9cBH&Y7 ze)W0aD>muAepOKgm7uPg3Dz7G%)nEqTUm_&^^3(>+eEI;$ia`m>m0QHEkTt^=cx^JsBC68#H(3zc~Z$E9I)oSrF$3 zUClHXhMBZ|^1ikm3nL$Z@v|JRhud*IhOvx!6X<(YSX(9LG#yYuZeB{=7-MyPF;?_8 zy2i3iVKG2q!=JHN>~!#Bl{cwa6-yB@b<;8LSj}`f9pw7#x3yTD>C=>1S@H)~(n_K4 z2-yr{2?|1b#lS`qG@+823j;&UE5|2+EdU4nVw5=m>o_gj#K>>(*t=xI7{R)lJhLU{ z4IO6!x@1f$aDVIE@1a0lraN9!(j~_uGlks)!&davUFRNYHflp<|ENwAxsp~4Hun$Q z$w>@YzXp#VX~)ZP8`_b_sTg(Gt7?oXJW%^Pf0UW%YM+OGjKS}X`yO~{7WH6nX8S6Z ztl!5AnM2Lo*_}ZLvo%?iV;D2z>#qdpMx*xY2*GGlRzmHCom`VedAoR=(A1nO)Y>;5 zCK-~a;#g5yDgf7_phlkM@)C8s!xOu)N2UnQhif-v5kL$*t=X}L9EyBRq$V(sI{90> z=ghTPGswRVbTW@dS2H|)QYTY&I$ljbpNPTc_T|FEJkSW7MV!JM4I(ksRqQ8)V5>}v z2Sf^Z9_v;dKSp_orZm09jb8;C(vzFFJgoYuWRc|Tt_&3k({wPKiD|*m!+za$(l*!gNRo{xtmqjy1=kGzFkTH=Nc>EL@1Um0BiN1)wBO$i z6rG={bRcT|%A3s3xh!Bw?=L&_-X+6}L9i~xRj2}-)7fsoq0|;;PS%mcn%_#oV#kAp zGw^23c8_0~ ze}v9(p};6HM0+qF5^^>BBEI3d=2DW&O#|(;wg}?3?uO=w+{*)+^l_-gE zSw8GV=4_%U4*OU^hibDV38{Qb7P#Y8zh@BM9pEM_o2FuFc2LWrW2jRRB<+IE)G=Vx zuu?cp2-`hgqlsn|$nx@I%TC!`>bX^G00_oKboOGGXLgyLKXoo$^@L7v;GWqfUFw3< zekKMWo0LR;TaFY}Tt4!O$3MU@pqcw!0w0 zA}SnJ6Lb597|P5W8$OsEHTku2Kw9y4V=hx*K%iSn!#LW9W#~OiWf^dXEP$^2 
zaok=UyGwy3GRp)bm6Gqr>8-4h@3=2`Eto2|JE6Sufh?%U6;ut1v1d@#EfcQP2chCt z+mB{Bk5~()7G>wM3KYf7Xh?LGbwg1uWLotmc_}Z_o;XOUDyfU?{9atAT$={v82^w9 z(MW$gINHt4xB3{bdbhRR%T}L?McK?!zkLK3(e>zKyei(yq%Nsijm~LV|9mll-XHavFcc$teX7v);H>=oN-+E_Q{c|! zp

      JV~-9AH}jxf6IF!PxrB9is{_9s@PYth^`pb%DkwghLdAyDREz(csf9)HcVRq z+2Vn~>{(S&_;bq_qA{v7XbU?yR7;~JrLfo;g$Lkm#ufO1P`QW_`zWW+4+7xzQZnO$ z5&GyJs4-VGb5MEDBc5=zxZh9xEVoY(|2yRv&!T7LAlIs@tw+4n?v1T8M>;hBv}2n) zcqi+>M*U@uY>4N3eDSAH2Rg@dsl!1py>kO39GMP#qOHipL~*cCac2_vH^6x@xmO|E zkWeyvl@P$2Iy*mCgVF+b{&|FY*5Ygi8237i)9YW#Fp& z?TJTQW+7U)xCE*`Nsx^yaiJ0KSW}}jc-ub)8Z8x(|K7G>`&l{Y&~W=q#^4Gf{}aJ%6kLXsmv6cr=Hi*uB`V26;dr4C$WrPnHO>g zg1@A%DvIWPDtXzll39kY6#%j;aN7grYJP9AlJgs3FnC?crv$wC7S4_Z?<_s0j;MmE z75yQGul2=bY%`l__1X3jxju2$Ws%hNv75ywfAqjgFO7wFsFDOW^)q2%VIF~WhwEW0 z45z^+r+}sJ{q+>X-w(}OiD(!*&cy4X&yM`!L0Fe+_RUfs@=J{AH#K~gArqT=#DcGE z!FwY(h&+&811rVCVoOuK)Z<-$EX zp`TzcUQC256@YWZ*GkE@P_et4D@qpM92fWA6c$MV=^qTu7&g)U?O~-fUR&xFqNiY1 zRd=|zUs_rmFZhKI|H}dcKhy%Okl(#y#QuMi81zsY56Y@757xBQqDNkd+XhLQhp2BB zBF^aJ__D676wLu|yYo6jNJNw^B+Ce;DYK!f$!dNs1*?D^97u^jKS++7S z5qE%zG#HY-SMUn^_yru=T6v`)CM%K<>_Z>tPe|js`c<|y7?qol&)C=>uLWkg5 zmzNcSAG_sL)E9or;i+O}tY^70@h7+=bG1;YDlX{<4zF_?{)K5B&?^tKZ6<$SD%@>F zY0cl2H7)%zKeDX%Eo7`ky^mzS)s;842cP{_;dzFuyd~Npb4u!bwkkhf8-^C2e3`q8>MuPhgiv0VxHxvrN9_`rJv&GX0fWz-L-Jg^B zrTsm>)-~j0F1sV=^V?UUi{L2cp%YwpvHwwLaSsCIrGI#({{QfbgDxLKsUC6w@m?y} zg?l=7aMX-RnMxvLn_4oSB|9t;)Qf2%m-GKo_07?N1l^ahJ+Wf8C>h5~=-o1BJzV@5HBTB-ACNpsHnGt6_ku37M z{vIEB^tR=--4SEg{jfF=gEogtGwi&A$mwk7E+SV$$ZuU}#F3Y7t}o{!w4LJh8v4PW%8HfUK@dta#l*z@w*9Xzz(i)r#WXi`r1D#oBPtNM7M?Hkq zhhS1)ea5(6VY45|)tCTr*@yc$^Zc!zQzsNXU?aRN6mh7zVu~i=qTrX^>de+f6HYfDsW@6PBlw0CsDBcOWUmt&st>Z zYNJEsRCP1#g0+Htb=wITvexBY@fOpAmR7?szQNR~nM)?sPWIj)0)jG-EF8U@nnBaQZy z)ImpVYQL>lBejMDjlxA$#G4%y+^_>N;}r@Zoe2|u-9-x@vvD^ZWnV>Gm=pZa7REAf zOnomhCxBaGZgT+4kiE%aS&lH2sI1mSCM<%)Cr*Sli;#!aXcUb&@Z|Hj{VPsJyClqD%>hy`Y7z(GASs8Mqas3!D zSQE83*%uctlD|p%4)v`arra4y>yP5m25V*_+n)Ry1v>z_Fz!TV6t+N?x?#iH$q=m= z8&X{uW%LVRO87dVl=$Y*>dabJVq{o|Kx`7(D2$5DVX&}XGbg|Ua(*5b=;5qzW9;|w>m{hIO(Tu-z(ey8H=EMluJNyK4BJmGpX~ZM2O61 zk*O7js{-MBqwq>Urf0igN+6soGGc!Y?SP6hiXuJzZ1V4WZqE*?h;PG84gvG~dds6~484!kPM zMP87IP?dhdc;%|cS&LxY*Ib6P3%p|9)E3IgRmhhwtUR3eRK6iZ_6fiGW}jnL4(I|t 
ze`2yLvmuY42lNwO6>I#Son3$R4NOoP*WUm1R4jl#agtSLE}fSu-Z>{+*?pQIn7`s3LAzF#1pSxCAo?clr9 z9PUj#REq28*ZkJnxs$aK%8^5?P<_Q!#Z?%JH0FKVF;&zH3F#J^fz|ahl$Ycs~kFij_XP;U<`FcaDYyXYPM~&jEe1Xj1n;wyRdD;lmnq&FEro=;+Z$=v-&fYM9eK*S_D&oTXFW#b0 zRY}Y7R#bLzTfg9i7{s?=P9~qjA?$-U2p5;0?gPPu`1JY|*?*8IPO!eX>oiX=O#F!A zl`S%e5Y(csR1f)I(iKMf-;5%_rPP7h&}5Fc(8byKUH1*d7?9%QC|4aADj3L8yuo6GOv#%HDgU3bN(UHw1+(99&Om%f!DY(RYSf4&Uny% zH}*&rEXc$W5+eyeEg|I|E-HnkIO0!$1sV7Z&NXxiCZJ@`kH4eEi5}q~!Vv5qQq{MI zi4^`GYoUN-7Q(jy^SKXL4$G4K+FQXR)B}ee=pS0RyK=YC8c2bGnMA~rrOh&jd3_AT zxVaq37w^-;OU3+C`Kko-Z%l_2FC^maa=Ae0Fm@PEtXEg@cX*oka1Lt&h@jES<6?o1Oi1C9>}7+U(Ve zQ$=8RlzcnfCd59CsJ=gG^A!2Bb_PY~K2sSau{)?Ge03G7US&qrgV!3NUi>UHWZ*lo zS;~0--vn{ot+7UWMV{a(X3rZ8Z06Ps3$-sd|CWE(Y#l`swvcDbMjuReGsoA`rmZ`^ z=AaArdbeU0EtwnOuzq@u5P1rlZjH#gNgh6HIhG(>dX%4m{_!&DNTQE)8= zXD-vcpcSi|DSm3aUMnrV;DQY?svz?9*#GT$NXb~Hem=24iy>7xj367(!#RjnrHtrP-Q`T2W*PEvAR-=j ztY2|#<|JvHNVnM-tNdoS_yRSo=yFqukTZmB$|>Vclj)o=YzC9!ph8)ZOH5X=%Aq|9gNgc}^KFVLht!Lyw54v5u&D zW%vT%z`H{Ax>Ry+bD&QjHQke_wEA;oj(&E!s4|OURButQKSc7Ar-PzIiFa8F@ezkaY2J9&PH+VI1!G+{JgsQ7%da*_Gr!exT*OgJld)b-?cd)xI+|v_C`h(Cg`N~oj0`SQPTma z{@vc8L^D-rBXwS#00jT#@=-n1H-C3hvg61r2jx#ok&cr#BV~9JdPaVihyrGq*lb>bm$H6rIoc}ifaSn6mTD9% z$FRJxbNozOo6y}!OUci1VBv-7{TYZ4GkOM@46Y9?8%mSH9?l&lU59)T#Fjg(h%6I} z?ib zZ(xb8Rwr+vv>@$h{WglT2lL`#V=-9tP^c)cjvnz(g|VL^h8^CPVv12dE(o}WQ@0OP z^2-&ssBXP^#Oh`X5@F+~$PCB6kK-T7sFUK|>$lNDSkvAy%{y2qgq-&v zv}^&gm`wiYztWgMS<{^qQKYNV=>CQaOeglAY~EZvr}n~tW=yg)_+fzqF%~+*V_$3h z2hDW`e$qR;QMg?(wKE>%H_6ASS@6bkOi-m- zg6B7AzD;gBS1%OD7|47a%3BykN{w}P!Wn-nQOfpKUpx8Mk{$IO62D!%U9$kr!e%T> zlqQih?3(U&5%r!KZFZPdbwZ0laAJCj!c&pEFVzrH&_&i5m68Y_*J+-Qjlnz}Q{3oAD)`d14H zKUGmbwC|beC9Mtp>SbL~NVrlctU3WBpHz(UeIa~_{u^_4OaHs_LQt>bUwcyD`_Bbh zC=x|1vSjL)JvVHLw|xKynEvq2m)7O-6qdmjht7pZ*z|o%NA17v$9H*(5D5(MXiNo1 z72Tv}QASqr$!mY58s_Q{hHa9MY+QZ`2zX-FT@Kd?`8pczcV^9IeOKDG4WKqiP7N|S z+O977=VQTk8k5dafK`vd(4?_3pBdB?YG9*Z=R@y|$S+d%1sJf-Ka++I&v9hH)h#}} zw-MjQWJ?ME<7PR(G<1#*Z-&M?%=yzhQw$Lki(R+Pq$X~Q!9BO=fP9FyCIS8zE3n04 
z8ScD%XmJnIv=pMTgt6VSxBXOZucndRE@7^aU0wefJYueY(Cb%?%0rz)zWEnsNsKhQ z+&o6d^x=R;Pt7fUa_`JVb1HPHYbXg{Jvux|atQ^bV#_|>7QZNC~P^IKUThB6{kvz2pr2*Cyxj zy37Nri8za8J!@Iw9rbt~#^<9zOaM8LOi$kPBcAGqPq-DB^-93Qeup{9@9&=zV6KQN zL)ic5S%n1!F(7b>MQ973$~<0|9MY-G!?wk?j-cQhMQlM2n{&7JoTBGsP;=fC6CBJn zxlpk^%x=B16rfb-W9pYV#9IRHQL9VG4?Uh>pN>2}0-MST2AB2pQjf*rT+TLCX-+&m z9I{ic2ogXoh=HwdI#igr(JC>>NUP|M>SA?-ux<2&>Jyx>Iko!B<3vS}{g*dKqxYW7 z0i`&U#*v)jot+keO#G&wowD!VvD(j`Z9a*-_RALKn0b(KnZ37d#Db7royLhBW~*7o zRa`=1fo9C4dgq;;R)JpP++a9^{xd)8``^fPW9!a%MCDYJc;3yicPs8IiQM>DhUX*; zeIrxE#JRrr|D$@bKgOm4C9D+e!_hQKj3LC`Js)|Aijx=J!rlgnpKeF>b+QlKhI^4* zf%Of^RmkW|xU|p#Lad44Y5LvIUIR>VGH8G zz7ZEIREG%UOy4)C!$muX6StM4@Fsh&Goa}cj10RL(#>oGtr6h~7tZDDQ_J>h)VmYlKK>9ns8w4tdx6LdN5xJQ9t-ABtTf_ zf1dKVv!mhhQFSN=ggf(#$)FtN-okyT&o6Ms+*u72Uf$5?4)78EErTECzweDUbbU)) zc*tt+9J~Pt%!M352Y5b`Mwrjn^Orp+)L_U1ORHJ}OUsB78YPcIRh4p5jzoDB7B*fb z4v`bouQeCAW#z9b1?4(M3dcwNn2F2plwC^RVHl#h&b-8n#5^o+Ll20OlJ^gOYiK2< z;MQuR!t!>`i}CAOa4a+Rh5IL|@kh4EdEL*O=3oGx4asg?XCTcUOQnmHs^6nLu6WcI zSt9q7nl*?2TIikKNb?3JZBo$cW6)b#;ZKzi+(~D-%0Ec+QW=bZZm@w|prGiThO3dy zU#TQ;RYQ+xU~*@Zj;Rf~z~iL8Da`RT!Z)b3ILBhnIl@VX9K0PSj5owH#*FJXX3vZ= zg_Zyn^G&l!WR6wN9GWvt)sM?g2^CA8&F#&t2z3_MiluRqvNbV{Me6yZ&X-_ zd6#Xdh%+6tCmSNTdCBusVkRwJ_A~<^Nd6~MNOvS;YDixM43`|8e_bmc*UWi7TLA})`T_F ztk&Nd=dgFUss#Ol$LXTRzP9l1JOSvAws~^X%(`ct$?2Im?UNpXjBec_-+8YK%rq#P zT9=h8&gCtgx?=Oj$Yr2jI3`VVuZ`lH>*N+*K11CD&>>F)?(`yr~54vHJftY*z?EorK zm`euBK<$(!XO%6-1=m>qqp6F`S@Pe3;pK5URT$8!Dd|;`eOWdmn916Ut5;iXWQoXE z0qtwxlH=m_NONP3EY2eW{Qwr-X1V3;5tV;g7tlL4BRilT#Y&~o_!f;*hWxWmvA;Pg zRb^Y$#PipnVlLXQIzKCuQP9IER0Ai4jZp+STb1Xq0w(nVn<3j(<#!vuc?7eJEZC<- zPhM7ObhgabN2`pm($tu^MaBkRLzx&jdh;>BP|^$TyD1UHt9Qvr{ZcBs^l!JI4~d-Py$P5QOYO&8eQOFe)&G zZm+?jOJioGs7MkkQBCzJSFJV6DiCav#kmdxc@IJ9j5m#&1)dhJt`y8{T!uxpBZ>&z zD^V~%GEaODak5qGj|@cA7HSH{#jHW;Q0KRdTp@PJO#Q1gGI=((a1o%X*{knz&_`ym zkRLikN^fQ%Gy1|~6%h^vx>ToJ(#aJDxoD8qyOD{CPbSvR*bC>Nm+mkw>6mD0mlD0X zGepCcS_x7+6X7dH;%e`aIfPr-NXSqlu&?$Br1R}3lSF2 zWOXDtG;v#EVLSQ!>4323VX-|E#qb+x%IxzUBDI~N23x? 
zXUHfTTV#_f9T$-2FPG@t)rpc9u9!@h^!4=fL^kg9 zVv%&KY3!?bU*V4X)wNT%Chr;YK()=~lc%$auOB_|oH`H)Xot@1cmk{^qdt&1C55>k zYnIkdoiAYW41zrRBfqR?9r^cpWIEqfS;|R#bIs4$cqA zoq~$yl8h{IXTSdSdH?;`ky6i%+Oc?HvwH+IS`%_a!d#CqQob9OTNIuhUnOQsX;nl_ z;1w99qO9lAb|guQ9?p4*9TmIZ5{su!h?v-jpOuShq!{AuHUYtmZ%brpgHl$BKLK_L z6q5vZodM$)RE^NNO>{ZWPb%Ce111V4wIX}?DHA=uzTu0$1h8zy!SID~m5t)(ov$!6 zB^@fP#vpx3enbrbX=vzol zj^Bg7V$Qa53#3Lptz<6Dz=!f+FvUBVIBtYPN{(%t(EcveSuxi3DI>XQ*$HX~O{KLK5Dh{H2ir87E^!(ye{9H&2U4kFxtKHkw zZPOTIa*29KbXx-U4hj&iH<9Z@0wh8B6+>qQJn{>F0mGnrj|0_{nwN}Vw_C!rm0!dC z>iRlEf}<+z&?Z4o3?C>QrLBhXP!MV0L#CgF{>;ydIBd5A{bd-S+VFn zLqq4a*HD%65IqQ5BxNz~vOGU=JJv|NG{OcW%2PU~MEfy6(bl#^TfT7+az5M-I`i&l z#g!HUfN}j#adA-21x7jbP6F;`99c8Qt|`_@u@fbhZF+Wkmr;IdVHj+F=pDb4MY?fU znDe##Hn){D}<>vVhYL#)+6p9eAT3T$?;-~bZU%l7MpPNh_mPc(h@79 z;LPOXk>e3nmIxl9lno5cI5G@Q!pE&hQ`s{$Ae4JhTebeTsj*|!6%0;g=wH?B1-p{P z`In#EP12q6=xXU)LiD+mLidPrYGHaKbe5%|vzApq9(PI6I5XjlGf<_uyy59iw8W;k zdLZ|8R8RWDc`#)n2?~}@5)vvksY9UaLW`FM=2s|vyg>Remm=QGthdNL87$nR&TKB*LB%*B}|HkG64 zZ|O4=Yq?Zwl>_KgIG@<8i{Zw#P3q_CVT7Dt zoMwoI)BkpQj8u(m!>1dfOwin(50}VNiLA>A2OG&TBXcP=H(3I;!WdPFe?r_e{%>bc6(Zk?6~Ew&;#ZxBJ| zAd1(sAHqlo_*rP;nTk)kAORe3cF&tj>m&LsvB)`-y9#$4XU=Dd^+CzvoAz%9216#f0cS`;kERxrtjbl^7pmO;_y zYBGOL7R1ne7%F9M2~0a7Srciz=MeaMU~ zV%Y#m_KV$XReYHtsraWLrdJItLtRiRo98T3J|x~(a>~)#>JHDJ z|4j!VO^qWQfCm9-$N29SpHUqvz62%#%98;2FNIF*?c9hZ7GAu$q>=0 zX_igPSK8Et(fmD)V=CvbtA-V(wS?z6WV|RX2`g=w=4D)+H|F_N(^ON!jHf72<2nCJ z^$hEygTAq7URR{Vq$)BsmFKTZ+i1i(D@SJuTGBN3W8{JpJ^J zkF=gBTz|P;Xxo1NIypGzJq8GK^#4tl)S%8$PP6E8c|GkkQ)vZ1OiB%mH#@hO1Z%Hp zv%2~Mlar^}7TRN-SscvQ*xVv+i1g8CwybQHCi3k;o$K@bmB%^-U8dILX)7b~#iPu@ z&D&W7YY2M3v`s(lNm2#^dCRFd;UYMUw1Rh2mto8laH1m`n0u;>okp5XmbsShOhQwo z@EYOehg-KNab)Rieib?m&NXls+&31)MB&H-zj_WmJsGjc1sCSOz0!2Cm1vV?y@kkQ z<1k6O$hvTQnGD*esux*aD3lEm$mUi0td0NiOtz3?7}h;Bt*vIC{tDBr@D)9rjhP^< zY*uKu^BiuSO%)&FL>C?Ng!HYZHLy`R>`rgq+lJhdXfo|df zmkzpQf{6o9%^|7Yb5v{Tu& zsP*Y~<#jK$S_}uEisRC;=y{zbq`4Owc@JyvB->nPzb#&vcMKi5n66PVV{Aub>*>q8 
z=@u7jYA4Ziw2{fSED#t4QLD7Rt`au^y(Ggp3y(UcwIKtI(OMi@GHxs!bj$v~j(FZK zbdcP^gExtXQqQ8^Q#rHy1&W8q!@^aL>g1v2R45T(KErWB)1rB@rU`#n&-?g2Ti~xXCrexrLgajgzNy=N9|A6K=RZ zc3yk>w5sz1zsg~tO~-Ie?%Aplh#)l3`s632mi#CCl^75%i6IY;dzpuxu+2fliEjQn z&=~U+@fV4>{Fp=kk0oQIvBdqS#yY`Z+>Z|T&K{d;v3}=JqzKx05XU3M&@D5!uPTGydasyeZ5=1~IX-?HlM@AGB9|Mzb{{Dt@bUU8{KUPU@EX zv0fpQNvG~nD2WiOe{Vn=hE^rQD(5m+!$rs%s{w9;yg9oxRhqi0)rwsd245)igLmv* zJb@Xlet$+)oS1Ra#qTB@U|lix{Y4lGW-$5*4xOLY{9v9&RK<|K!fTd0wCKYZ)h&2f zEMcTCd+bj&YVmc#>&|?F!3?br3ChoMPTA{RH@NF(jmGMB2fMyW(<0jUT=8QFYD7-% zS0ydgp%;?W=>{V9>BOf=p$q5U511~Q0-|C!85)W0ov7eb35%XV;3mdUI@f5|x5C)R z$t?xLFZOv}A(ZjjSbF+8&%@RChpRvo>)sy>-IO8A@>i1A+8bZd^5J#(lgNH&A=V4V z*HUa0{zT{u-_FF$978RziwA@@*XkV{<-CE1N=Z!_!7;wq*xt3t((m+^$SZKaPim3K zO|Gq*w5r&7iqiQ!03SY{@*LKDkzhkHe*TzQaYAkz&jNxf^&A_-40(aGs53&}$dlKz zsel3=FvHqdeIf!UYwL&Mg3w_H?utbE_(PL9B|VAyaOo8k4qb>EvNYHrVmj^ocJQTf zL%4vl{qgmJf#@uWL@)WiB>Lm>?ivwB%uO|)i~;#--nFx4Kr6{TruZU0N_t_zqkg`? zwPFK|WiC4sI%o1H%$!1ANyq6_0OSPQJybh^vFriV=`S;kSsYkExZwB{68$dTODWJQ z@N57kBhwN(y~OHW_M}rX2W13cl@*i_tjW`TMfa~Y;I}1hzApXgWqag@(*@(|EMOg- z^qMk(s~dL#ps>>`oWZD=i1XI3(;gs7q#^Uj&L`gVu#4zn$i!BIHMoOZG!YoPO^=Gu z5`X-(KoSsHL77c<7^Y*IM2bI!dzg5j>;I@2-EeB$LgW|;csQTM&Z|R)q>yEjk@Sw% z6FQk*&zHWzcXalUJSoa&pgH24n`wKkg=2^ta$b1`(BBpBT2Ah9yQF&Kh+3jTaSE|=vChGz2_R^{$C;D`Ua(_=|OO11uLm;+3k%kO19EA`U065i;fRBoH z{Hq$cgHKRFPf0#%L?$*KeS@FDD;_TfJ#dwP7zzO5F>xntH(ONK{4)#jYUDQr6N(N< zp+fAS9l9)^c4Ss8628Zq5AzMq4zc(In_yJSXAT57Dtl}@= zvZoD7iq0cx7*#I{{r9m{%~g6@Hdr|*njKBb_5}mobCv=&X^`D9?;x6cHwRcwnlO^h zl;MiKr#LaoB*PELm8+8%btnC)b^E12!^ zMmVA!z>59e7n+^!P{PA?f9M^2FjKVw1%x~<`RY5FcXJE)AE}MTopGFDkyEjGiE|C6 z(ad%<3?v*?p;LJGopSEY18HPu2*}U!Nm|rfewc6(&y(&}B#j85d-5PeQ{}zg>>Rvl zDQ3H4E%q_P&kjuAQ>!0bqgAj){vzHpnn+h(AjQ6GO9v**l0|aCsCyXVE@uh?DU;Em zE*+7EU9tDH````D`|rM6WUlzBf1e{ht8$62#ilA6Dcw)qAzSRwu{czZJAcKv8w(Q6 zx)b$aq*=E=b5(UH-5*u)3iFlD;XQyklZrwHy}+=h6=aKtTriguHP@Inf+H@q32_LL z2tX|+X}4dMYB;*EW9~^5bydv)_!<%q#%Ocyh=1>FwL{rtZ?#2Scp{Q55%Fd-LgLU$ zM2u#|F{%vi%+O2^~uK3)?$6>9cc7_}F 
zWU72eFrzZ~x3ZIBH;~EMtD%51o*bnW;&QuzwWd$ds=O>Ev807cu%>Ac^ZK&7bCN;Ftk#eeQL4pG0p!W{Ri@tGw>nhIo`rC zi!Z6?70nYrNf92V{Y_i(a4DG=5>RktP=?%GcHEx?aKN$@{w{uj#Cqev$bXefo?yC6KI%Rol z%~$974WCymg;BBhd9Mv}_MeNro_8IB4!evgo*je4h?B-CAkEW-Wr-Q_V9~ef(znU& z{f-OHnj>@lZH(EcUb2TpOkc70@1BPiY0B#++1EPY5|UU?&^Vpw|C`k4ZWiB-3oAQM zgmG%M`2qDw5BMY|tG++34My2fE|^kvMSp(d+~P(Vk*d+RW1833i_bX^RYbg9tDtX` zox?y^YYfs-#fX|y7i(FN7js)66jN!`p9^r7oildEU#6J1(415H3h>W*p(p9@dI|c7 z&c*Aqzksg}o`D@i+o@WIw&jjvL!(`)JglV5zwMn)praO2M05H&CDeps0Wq8(8AkuE zPm|8MB6f0kOzg(gw}k>rzhQyo#<#sVdht~Wdk`y`=%0!jbd1&>Kxed8lS{Xq?Zw>* zU5;dM1tt``JH+A9@>H%-9f=EnW)UkRJe0+e^iqm0C5Z5?iEn#lbp}Xso ztleC}hl&*yPFcoCZ@sgvvjBA_Ew6msFml$cfLQY_(=h03WS_z+Leeh$M3#-?f9YT^Q($z z+pgaEv$rIa*9wST`WHASQio=9IaVS7l<87%;83~X*`{BX#@>>p=k`@FYo ze!K5_h8hOc`m0mK0p}LxsguM}w=9vw6Ku8y@RNrXSRPh&S`t4UQY=e-B8~3YCt1Fc zU$CtRW%hbcy{6K{>v0F*X<`rXVM3a{!muAeG$zBf`a(^l${EA9w3>J{aPwJT?mKVN2ba+v)Mp*~gQ_+Ws6= zy@D?85!U@VY0z9T=E9LMbe$?7_KIg)-R$tD)9NqIt84fb{B;f7C)n+B8)Cvo*F0t! zva6LeeC}AK4gL#d#N_HvvD& z0;mdU3@7%d5>h(xX-NBmJAOChtb(pX-qUtRLF5f$ z`X?Kpu?ENMc88>O&ym_$Jc7LZ> z#73|xJ|aa@l}PawS4Mpt9n)38w#q^P1w2N|rYKdcG;nb!_nHMZA_09L!j)pBK~e+j?tb-_A`wF8 zIyh>&%v=|n?+~h}%i1#^9UqZ?E9W!qJ0d0EHmioSt@%v7FzF`eM$X==#oaPESHBm@ zYzTXVo*y|C0~l_)|NF|F(If~YWJVkQAEMf5IbH{}#>PZpbXZU;+b^P8LWmlmDJ%Zu)4CajvRL!g_Faph`g0hpA2)D0|h zYy0h5+@4T81(s0D=crojdj|dYa{Y=<2zKp@xl&{sHO;#|!uTHtTey25f1U z#=Nyz{rJy#@SPk3_U|aALcg%vEjwIqSO$LZI59^;Mu~Swb53L+>oxWiN7J{;P*(2b@ao*aU~}-_j10 z@fQiaWnb}fRrHhNKrxKmi{aC#34BRP(a#0K>-J8D+v_2!~(V-6J%M@L{s?fU5ChwFfqn)2$siOUKw z?SmIRlbE8ot5P^z0J&G+rQ5}H=JE{FNsg`^jab7g-c}o`s{JS{-#}CRdW@hO`HfEp z1eR0DsN! 
zt5xmsYt{Uu;ZM`CgW)VYk=!$}N;w+Ct$Wf!*Z-7}@pA62F^1e$Ojz9O5H;TyT&rV( zr#IBM8te~-2t2;kv2xm&z%tt3pyt|s#vg2EOx1XkfsB*RM;D>ab$W-D6#Jdf zJ3{yD;P4=pFNk2GL$g~+5x;f9m*U2!ovWMK^U5`mAgBRhGpu)e`?#4vsE1aofu)iT zDm;aQIK6pNd8MMt@}h|t9c$)FT7PLDvu3e)y`otVe1SU4U=o@d!gn(DB9kC>Ac1wJ z?`{Hq$Q!rGb9h&VL#z+BKsLciCttdLJe9EmZF)J)c1MdVCrxg~EM80_b3k{ur=jVjrVhDK1GTjd3&t#ORvC0Q_&m|n>&TF1C_>k^8&ylR7oz#rG?mE%V| zepj0BlD|o?p8~LK_to`GINhGyW{{jZ{xqaO*SPvH)BYy1eH22DL_Kkn28N!0z3fzj z_+xZ3{ph_Tgkd)D$OjREak$O{F~mODA_D`5VsoobVnpxI zV0F_79%JB!?@jPs=cY73FhGuT!?fpVX1W=Wm zK5}i7(Pfh4o|Z{Ur=Y>bM1BDo2OdXBB(4Y#Z!61A8C6;7`6v-(P{ou1mAETEV?Nt< zMY&?ucJcJ$NyK0Zf@b;U#3ad?#dp`>zmNn=H1&-H`Y+)ai-TfyZJX@O&nRB*7j$ zDQF!q#a7VHL3z#Hc?Ca!MRbgL`daF zW#;L$yiQP|5VvgvRLluk3>-1cS+7MQ1)DC&DpYyS9j;!Rt$HdXK1}tG3G_)ZwXvGH zG;PB^f@CFrbEK4>3gTVj73~Tny+~k_pEHt|^eLw{?6NbG&`Ng9diB9XsMr(ztNC!{FhW8Hi!)TI`(Q|F*b z-z;#*c1T~kN67omP(l7)ZuTlxaC_XI(K8$VPfAzj?R**AMb0*p@$^PsN!LB@RYQ4U zA^xYY9sX4+;7gY%$i%ddfvneGfzbE4ZTJT5Vk3&1`?ULTy28&D#A&{dr5ZlZH&NTz zdfZr%Rw*Ukmgu@$C5$}QLOyb|PMA5syQns?iN@F|VFEvFPK321mTW^uv?GGNH6rnM zR9a2vB`}Y++T3Wumy$6`W)_c0PS*L;;0J^(T7<)`s{}lZVp`e)fM^?{$ zLbNw>N&6aw5Hlf_M)h8=)x0$*)V-w-Pw5Kh+EY{^$?#{v)_Y{9p5K{DjLnJ(ZUcyk*y(6D8wHB8=>Y)fb_Pw0v)Xybk`Sw@hNEaHP$-n`DtYP ziJyiauEXtuMpWyQjg$gdJR?e+=8w+=5GO-OT8pRaVFP1k^vI|I&agGjN-O*bJEK!M z`kt^POhUexh+PA&@And|vk-*MirW?>qB(f%y{ux z*d44UXxQOs+C`e-x4KSWhPg-!gO~kavIL8X3?!Ac2ih-dkK~Ua2qlcs1b-AIWg*8u z0QvL~51vS$LnmJSOnV4JUCUzg&4;bSsR5r_=FD@y|)Y2R_--e zMWJ;~*r=vJssF5_*n?wF0DO_>Mja=g+HvT=Yd^uBU|aw zRixHUQJX0Pgt-nFV+8&|;-n>!jNUj!8Y_YzH*%M!-_uWt6& z|Ec+lAD``i^do;u_?<(RpzsYZVJ8~}|NjUFgXltofbjhf!v&208g^#0h-x?`z8cInq!9kfVwJ|HQ;VK>p_-fn@(3q?e51Keq(=U-7C0#as-q z8Or}Ps07>O2@AAXz_%3bTOh{tKm#uRe}Sqr=w6-Wz$FCdfF3qNabEaj`-OfipxaL- zPh2R*l&%ZbcV?lv4C3+t2DAVSFaRo20^W_n4|0t(_*`?KmmUHG2sNZ*CRZlCFIyZbJqLdBCj)~%if)g|4NJr(8!R!E0iBbm$;`m;1n2@(8*E%B zH!g{hK|WK?1jUfM9zX?hlV#l%!6^p$$P+~rg}OdKg|d^Ed4WTY1$1J@WWHr$Os_(L z;-Zu1FJqhR4LrCUl)C~E7gA!^wtA6YIh10In9rX@LGSjnTPtLp+gPGp6u 
z3}{?J1!yT~?FwqT;O_-1%37f#4ek&DL){N}MX3RbNfRb-T;U^wXhx#De&QssA$lu~ mWkA_K7-+yz9tH*t6hj_Qg(_m7JaeTomk=)l!_+yTk^le-`GmOu delta 34176 zcmX7vV`H6d(}mmEwr$(CZQE$vU^m*aZQE(=WXEZ2+l}qF_w)XN>&rEBu9;)4>7EB0 zo(HR^Mh47P)@z^^pH!4#b(O8!;$>N+S+v5K5f8RrQ+Qv0_oH#e!pI2>yt4ij>fI9l zW&-hsVAQg%dpn3NRy$kb_vbM2sr`>bZ48b35m{D=OqX;p8A${^Dp|W&J5mXvUl#_I zN!~GCBUzj~C%K?<7+UZ_q|L)EGG#_*2Zzko-&Kck)Qd2%CpS3{P1co1?$|Sj1?E;PO z7alI9$X(MDly9AIEZ-vDLhpAKd1x4U#w$OvBtaA{fW9)iD#|AkMrsSaNz(69;h1iM1#_ z?u?O_aKa>vk=j;AR&*V-p3SY`CI}Uo%eRO(Dr-Te<99WQhi>y&l%UiS%W2m(d#woD zW?alFl75!1NiUzVqgqY98fSQNjhX3uZ&orB08Y*DFD;sjIddWoJF;S_@{Lx#SQk+9 zvSQ-620z0D7cy8-u_7u?PqYt?R0m2k%PWj%V(L|MCO(@3%l&pzEy7ijNv(VXU9byn z@6=4zL|qk*7!@QWd9imT9i%y}1#6+%w=s%WmsHbw@{UVc^?nL*GsnACaLnTbr9A>B zK)H-$tB`>jt9LSwaY+4!F1q(YO!E7@?SX3X-Ug4r($QrmJnM8m#;#LN`kE>?<{vbCZbhKOrMpux zTU=02hy${;n&ikcP8PqufhT9nJU>s;dyl;&~|Cs+o{9pCu{cRF+0{iyuH~6=tIZXVd zR~pJBC3Hf-g%Y|bhTuGyd~3-sm}kaX5=T?p$V?48h4{h2;_u{b}8s~Jar{39PnL7DsXpxcX#3zx@f9K zkkrw9s2*>)&=fLY{=xeIYVICff2Id5cc*~l7ztSsU@xuXYdV1(lLGZ5)?mXyIDf1- zA7j3P{C5s?$Y-kg60&XML*y93zrir8CNq*EMx)Kw)XA(N({9t-XAdX;rjxk`OF%4-0x?ne@LlBQMJe5+$Ir{Oj`@#qe+_-z!g5qQ2SxKQy1ex_x^Huj%u+S@EfEPP-70KeL@7@PBfadCUBt%`huTknOCj{ z;v?wZ2&wsL@-iBa(iFd)7duJTY8z-q5^HR-R9d*ex2m^A-~uCvz9B-1C$2xXL#>ow z!O<5&jhbM&@m=l_aW3F>vjJyy27gY}!9PSU3kITbrbs#Gm0gD?~Tub8ZFFK$X?pdv-%EeopaGB#$rDQHELW!8bVt`%?&>0 zrZUQ0!yP(uzVK?jWJ8^n915hO$v1SLV_&$-2y(iDIg}GDFRo!JzQF#gJoWu^UW0#? 
z*OC-SPMEY!LYY*OO95!sv{#-t!3Z!CfomqgzFJld>~CTFKGcr^sUai5s-y^vI5K={ z)cmQthQuKS07e8nLfaIYQ5f}PJQqcmokx?%yzFH*`%k}RyXCt1Chfv5KAeMWbq^2MNft;@`hMyhWg50(!jdAn;Jyx4Yt)^^DVCSu?xRu^$*&&=O6#JVShU_N3?D)|$5pyP8A!f)`| z>t0k&S66T*es5(_cs>0F=twYJUrQMqYa2HQvy)d+XW&rai?m;8nW9tL9Ivp9qi2-` zOQM<}D*g`28wJ54H~1U!+)vQh)(cpuf^&8uteU$G{9BUhOL| zBX{5E1**;hlc0ZAi(r@)IK{Y*ro_UL8Ztf8n{Xnwn=s=qH;fxkK+uL zY)0pvf6-iHfX+{F8&6LzG;&d%^5g`_&GEEx0GU=cJM*}RecV-AqHSK@{TMir1jaFf&R{@?|ieOUnmb?lQxCN!GnAqcii9$ z{a!Y{Vfz)xD!m2VfPH=`bk5m6dG{LfgtA4ITT?Sckn<92rt@pG+sk>3UhTQx9ywF3 z=$|RgTN<=6-B4+UbYWxfQUOe8cmEDY3QL$;mOw&X2;q9x9qNz3J97)3^jb zdlzkDYLKm^5?3IV>t3fdWwNpq3qY;hsj=pk9;P!wVmjP|6Dw^ez7_&DH9X33$T=Q{>Nl zv*a*QMM1-2XQ)O=3n@X+RO~S`N13QM81^ZzljPJIFBh%x<~No?@z_&LAl)ap!AflS zb{yFXU(Uw(dw%NR_l7%eN2VVX;^Ln{I1G+yPQr1AY+0MapBnJ3k1>Zdrw^3aUig*! z?xQe8C0LW;EDY(qe_P!Z#Q^jP3u$Z3hQpy^w7?jI;~XTz0ju$DQNc4LUyX}+S5zh> zGkB%~XU+L?3pw&j!i|x6C+RyP+_XYNm9`rtHpqxvoCdV_MXg847oHhYJqO+{t!xxdbsw4Ugn($Cwkm^+36&goy$vkaFs zrH6F29eMPXyoBha7X^b+N*a!>VZ<&Gf3eeE+Bgz7PB-6X7 z_%2M~{sTwC^iQVjH9#fVa3IO6E4b*S%M;#WhHa^L+=DP%arD_`eW5G0<9Tk=Ci?P@ z6tJXhej{ZWF=idj32x7dp{zmQY;;D2*11&-(~wifGXLmD6C-XR=K3c>S^_+x!3OuB z%D&!EOk;V4Sq6eQcE{UEDsPMtED*;qgcJU^UwLwjE-Ww54d73fQ`9Sv%^H>juEKmxN+*aD=0Q+ZFH1_J(*$~9&JyUJ6!>(Nj zi3Z6zWC%Yz0ZjX>thi~rH+lqv<9nkI3?Ghn7@!u3Ef){G(0Pvwnxc&(YeC=Kg2-7z zr>a^@b_QClXs?Obplq@Lq-l5>W);Y^JbCYk^n8G`8PzCH^rnY5Zk-AN6|7Pn=oF(H zxE#8LkI;;}K7I^UK55Z)c=zn7OX_XVgFlEGSO}~H^y|wd7piw*b1$kA!0*X*DQ~O` z*vFvc5Jy7(fFMRq>XA8Tq`E>EF35{?(_;yAdbO8rrmrlb&LceV%;U3haVV}Koh9C| zTZnR0a(*yN^Hp9u*h+eAdn)d}vPCo3k?GCz1w>OOeme(Mbo*A7)*nEmmUt?eN_vA; z=~2}K_}BtDXJM-y5fn^v>QQo+%*FdZQFNz^j&rYhmZHgDA-TH47#Wjn_@iH4?6R{J z%+C8LYIy>{3~A@|y4kN8YZZp72F8F@dOZWp>N0-DyVb4UQd_t^`P)zsCoygL_>>x| z2Hyu7;n(4G&?wCB4YVUIVg0K!CALjRsb}&4aLS|}0t`C}orYqhFe7N~h9XQ_bIW*f zGlDCIE`&wwyFX1U>}g#P0xRRn2q9%FPRfm{-M7;}6cS(V6;kn@6!$y06lO>8AE_!O z{|W{HEAbI0eD$z9tQvWth7y>qpTKQ0$EDsJkQxAaV2+gE28Al8W%t`Pbh zPl#%_S@a^6Y;lH6BfUfZNRKwS#x_keQ`;Rjg@qj 
zZRwQXZd-rWngbYC}r6X)VCJ-=D54A+81%(L*8?+&r7(wOxDSNn!t(U}!;5|sjq zc5yF5$V!;%C#T+T3*AD+A({T)#p$H_<$nDd#M)KOLbd*KoW~9E19BBd-UwBX1<0h9 z8lNI&7Z_r4bx;`%5&;ky+y7PD9F^;Qk{`J@z!jJKyJ|s@lY^y!r9p^75D)_TJ6S*T zLA7AA*m}Y|5~)-`cyB+lUE9CS_`iB;MM&0fX**f;$n($fQ1_Zo=u>|n~r$HvkOUK(gv_L&@DE0b4#ya{HN)8bNQMl9hCva zi~j0v&plRsp?_zR zA}uI4n;^_Ko5`N-HCw_1BMLd#OAmmIY#ol4M^UjLL-UAat+xA+zxrFqKc@V5Zqan_ z+LoVX-Ub2mT7Dk_ z<+_3?XWBEM84@J_F}FDe-hl@}x@v-s1AR{_YD!_fMgagH6s9uyi6pW3gdhauG>+H? zi<5^{dp*5-9v`|m*ceT&`Hqv77oBQ+Da!=?dDO&9jo;=JkzrQKx^o$RqAgzL{ zjK@n)JW~lzxB>(o(21ibI}i|r3e;17zTjdEl5c`Cn-KAlR7EPp84M@!8~CywES-`mxKJ@Dsf6B18_!XMIq$Q3rTDeIgJ3X zB1)voa#V{iY^ju>*Cdg&UCbx?d3UMArPRHZauE}c@Fdk;z85OcA&Th>ZN%}=VU%3b9={Q(@M4QaeuGE(BbZ{U z?WPDG+sjJSz1OYFpdImKYHUa@ELn%n&PR9&I7B$<-c3e|{tPH*u@hs)Ci>Z@5$M?lP(#d#QIz}~()P7mt`<2PT4oHH}R&#dIx4uq943D8gVbaa2&FygrSk3*whGr~Jn zR4QnS@83UZ_BUGw;?@T zo5jA#potERcBv+dd8V$xTh)COur`TQ^^Yb&cdBcesjHlA3O8SBeKrVj!-D3+_p6%P zP@e{|^-G-C(}g+=bAuAy8)wcS{$XB?I=|r=&=TvbqeyXiuG43RR>R72Ry7d6RS;n^ zO5J-QIc@)sz_l6%Lg5zA8cgNK^GK_b-Z+M{RLYk5=O|6c%!1u6YMm3jJg{TfS*L%2 zA<*7$@wgJ(M*gyTzz8+7{iRP_e~(CCbGB}FN-#`&1ntct@`5gB-u6oUp3#QDxyF8v zOjxr}pS{5RpK1l7+l(bC)0>M;%7L?@6t}S&a zx0gP8^sXi(g2_g8+8-1~hKO;9Nn%_S%9djd*;nCLadHpVx(S0tixw2{Q}vOPCWvZg zjYc6LQ~nIZ*b0m_uN~l{&2df2*ZmBU8dv`#o+^5p>D5l%9@(Y-g%`|$%nQ|SSRm0c zLZV)45DS8d#v(z6gj&6|ay@MP23leodS8-GWIMH8_YCScX#Xr)mbuvXqSHo*)cY9g z#Ea+NvHIA)@`L+)T|f$Etx;-vrE3;Gk^O@IN@1{lpg&XzU5Eh3!w;6l=Q$k|%7nj^ z|HGu}c59-Ilzu^w<93il$cRf@C(4Cr2S!!E&7#)GgUH@py?O;Vl&joXrep=2A|3Vn zH+e$Ctmdy3B^fh%12D$nQk^j|v=>_3JAdKPt2YVusbNW&CL?M*?`K1mK*!&-9Ecp~>V1w{EK(429OT>DJAV21fG z=XP=%m+0vV4LdIi#(~XpaUY$~fQ=xA#5?V%xGRr_|5WWV=uoG_Z&{fae)`2~u{6-p zG>E>8j({w7njU-5Lai|2HhDPntQ(X@yB z9l?NGoKB5N98fWrkdN3g8ox7Vic|gfTF~jIfXkm|9Yuu-p>v3d{5&hC+ZD%mh|_=* zD5v*u(SuLxzX~owH!mJQi%Z=ALvdjyt9U6baVY<88B>{HApAJ~>`buHVGQd%KUu(d z5#{NEKk6Vy08_8*E(?hqZe2L?P2$>!0~26N(rVzB9KbF&JQOIaU{SumX!TsYzR%wB z<5EgJXDJ=1L_SNCNZcBWBNeN+Y`)B%R(wEA?}Wi@mp(jcw9&^1EMSM58?68gwnXF` 
zzT0_7>)ep%6hid-*DZ42eU)tFcFz7@bo=<~CrLXpNDM}tv*-B(ZF`(9^RiM9W4xC%@ZHv=>w(&~$Wta%)Z;d!{J;e@z zX1Gkw^XrHOfYHR#hAU=G`v43E$Iq}*gwqm@-mPac0HOZ0 zVtfu7>CQYS_F@n6n#CGcC5R%4{+P4m7uVlg3axX}B(_kf((>W?EhIO&rQ{iUO$16X zv{Abj3ZApUrcar7Ck}B1%RvnR%uocMlKsRxV9Qqe^Y_5C$xQW@9QdCcF%W#!zj;!xWc+0#VQ*}u&rJ7)zc+{vpw+nV?{tdd&Xs`NV zKUp|dV98WbWl*_MoyzM0xv8tTNJChwifP!9WM^GD|Mkc75$F;j$K%Y8K@7?uJjq-w zz*|>EH5jH&oTKlIzueAN2926Uo1OryC|CmkyoQZABt#FtHz)QmQvSX35o`f z<^*5XXxexj+Q-a#2h4(?_*|!5Pjph@?Na8Z>K%AAjNr3T!7RN;7c)1SqAJfHY|xAV z1f;p%lSdE8I}E4~tRH(l*rK?OZ>mB4C{3e%E-bUng2ymerg8?M$rXC!D?3O}_mka? zm*Y~JMu+_F7O4T;#nFv)?Ru6 z92r|old*4ZB$*6M40B;V&2w->#>4DEu0;#vHSgXdEzm{+VS48 z7U1tVn#AnQ3z#gP26$!dmS5&JsXsrR>~rWA}%qd{92+j zu+wYAqrJYOA%WC9nZ>BKH&;9vMSW_59z5LtzS4Q@o5vcrWjg+28#&$*8SMYP z!l5=|p@x6YnmNq>23sQ(^du5K)TB&K8t{P`@T4J5cEFL@qwtsCmn~p>>*b=37y!kB zn6x{#KjM{S9O_otGQub*K)iIjtE2NfiV~zD2x{4r)IUD(Y8%r`n;#)ujIrl8Sa+L{ z>ixGoZJ1K@;wTUbRRFgnltN_U*^EOJS zRo4Y+S`cP}e-zNtdl^S5#%oN#HLjmq$W^(Y6=5tM#RBK-M14RO7X(8Gliy3+&9fO; zXn{60%0sWh1_g1Z2r0MuGwSGUE;l4TI*M!$5dm&v9pO7@KlW@j_QboeDd1k9!7S)jIwBza-V#1)(7ht|sjY}a19sO!T z2VEW7nB0!zP=Sx17-6S$r=A)MZikCjlQHE)%_Ka|OY4+jgGOw=I3CM`3ui^=o0p7u z?xujpg#dRVZCg|{%!^DvoR*~;QBH8ia6%4pOh<#t+e_u!8gjuk_Aic=|*H24Yq~Wup1dTRQs0nlZOy+30f16;f7EYh*^*i9hTZ`h`015%{i|4 z?$7qC3&kt#(jI#<76Biz=bl=k=&qyaH>foM#zA7}N`Ji~)-f-t&tR4^do)-5t?Hz_Q+X~S2bZx{t+MEjwy3kGfbv(ij^@;=?H_^FIIu*HP_7mpV)NS{MY-Rr7&rvWo@Wd~{Lt!8|66rq`GdGu% z@<(<7bYcZKCt%_RmTpAjx=TNvdh+ZiLkMN+hT;=tC?%vQQGc7WrCPIYZwYTW`;x|N zrlEz1yf95FiloUU^(onr3A3>+96;;6aL?($@!JwiQ2hO|^i)b4pCJ7-y&a~B#J`#FO!3uBp{5GBvM2U@K85&o0q~6#LtppE&cVY z3Bv{xQ-;i}LN-60B2*1suMd=Fi%Y|7@52axZ|b=Wiwk^5eg{9X4}(q%4D5N5_Gm)` zg~VyFCwfkIKW(@@ZGAlTra6CO$RA_b*yz#){B82N7AYpQ9)sLQfhOAOMUV7$0|d$=_y&jl>va$3u-H z_+H*|UXBPLe%N2Ukwu1*)kt!$Y>(IH3`YbEt; znb1uB*{UgwG{pQnh>h@vyCE!6B~!k}NxEai#iY{$!_w54s5!6jG9%pr=S~3Km^EEA z)sCnnau+ZY)(}IK#(3jGGADw8V7#v~<&y5cF=5_Ypkrs3&7{}%(4KM7) zuSHVqo~g#1kzNwXc39%hL8atpa1Wd#V^uL=W^&E)fvGivt)B!M)?)Y#Ze&zU6O_I?1wj)*M;b*dE 
zqlcwgX#eVuZj2GKgBu@QB(#LHMd`qk<08i$hG1@g1;zD*#(9PHjVWl*5!;ER{Q#A9 zyQ%fu<$U?dOW=&_#~{nrq{RRyD8upRi}c-m!n)DZw9P>WGs>o1vefI}ujt_`O@l#Z z%xnOt4&e}LlM1-0*dd?|EvrAO-$fX8i{aTP^2wsmSDd!Xc9DxJB=x1}6|yM~QQPbl z0xrJcQNtWHgt*MdGmtj%x6SWYd?uGnrx4{m{6A9bYx`m z$*UAs@9?3s;@Jl19%$!3TxPlCkawEk12FADYJClt0N@O@Pxxhj+Kk(1jK~laR0*KGAc7%C4nI^v2NShTc4#?!p{0@p0T#HSIRndH;#Ts0YECtlSR}~{Uck+keoJq6iH)(Zc~C!fBe2~4(Wd> zR<4I1zMeW$<0xww(@09!l?;oDiq zk8qjS9Lxv$<5m#j(?4VLDgLz;8b$B%XO|9i7^1M;V{aGC#JT)c+L=BgCfO5k>CTlI zOlf~DzcopV29Dajzt*OcYvaUH{UJPaD$;spv%>{y8goE+bDD$~HQbON>W*~JD`;`- zZEcCPSdlCvANe z=?|+e{6AW$f(H;BND>uy1MvQ`pri>SafK5bK!YAE>0URAW9RS8#LWUHBOc&BNQ9T+ zJpg~Eky!u!9WBk)!$Z?!^3M~o_VPERYnk1NmzVYaGH;1h+;st==-;jzF~2LTn+x*k zvywHZg7~=aiJe=OhS@U>1fYGvT1+jsAaiaM;) zay2xsMKhO+FIeK?|K{G4SJOEt*eX?!>K8jpsZWW8c!X|JR#v(1+Ey5NM^TB1n|_40 z@Db2gH}PNT+3YEyqXP8U@)`E|Xat<{K5K;eK7O0yV72m|b!o43!e-!P>iW>7-9HN7 zmmc7)JX0^lPzF#>$#D~nU^3f!~Q zQWly&oZEb1847&czU;dg?=dS>z3lJkADL1innNtE(f?~OxM`%A_PBp?Lj;zDDomf$ z;|P=FTmqX|!sHO6uIfCmh4Fbgw@`DOn#`qAPEsYUiBvUlw zevH{)YWQu>FPXU$%1!h*2rtk_J}qNkkq+StX8Wc*KgG$yH#p-kcD&)%>)Yctb^JDB zJe>=!)5nc~?6hrE_3n^_BE<^;2{}&Z>Dr)bX>H{?kK{@R)`R5lnlO6yU&UmWy=d03 z*(jJIwU3l0HRW1PvReOb|MyZT^700rg8eFp#p<3Et%9msiCxR+jefK%x81+iN0=hG z;<`^RUVU+S)Iv-*5y^MqD@=cp{_cP4`s=z)Ti3!Bf@zCmfpZTwf|>|0t^E8R^s`ad z5~tA?0x7OM{*D;zb6bvPu|F5XpF11`U5;b*$p zNAq7E6c=aUnq>}$JAYsO&=L^`M|DdSSp5O4LA{|tO5^8%Hf1lqqo)sj=!aLNKn9(3 zvKk($N`p`f&u+8e^Z-?uc2GZ_6-HDQs@l%+pWh!|S9+y3!jrr3V%cr{FNe&U6(tYs zLto$0D+2}K_9kuxgFSeQ!EOXjJtZ$Pyl_|$mPQ9#fES=Sw8L% zO7Jij9cscU)@W+$jeGpx&vWP9ZN3fLDTp zaYM$gJD8ccf&g>n?a56X=y zec%nLN`(dVCpSl9&pJLf2BN;cR5F0Nn{(LjGe7RjFe7efp3R_2JmHOY#nWEc2TMhMSj5tBf-L zlxP3sV`!?@!mRnDTac{35I7h@WTfRjRiFw*Q*aD8)n)jdkJC@)jD-&mzAdK6Kqdct8P}~dqixq;n zjnX!pb^;5*Rr?5ycT7>AB9)RED^x+DVDmIbHKjcDv2lHK;apZOc=O@`4nJ;k|iikKk66v4{zN#lmSn$lh z_-Y3FC)iV$rFJH!#mNqWHF-DtSNbI)84+VLDWg$ph_tkKn_6+M1RZ!)EKaRhY={el zG-i@H!fvpH&4~$5Q+zHU(Ub=;Lzcrc3;4Cqqbr$O`c5M#UMtslK$3r+Cuz>xKl+xW?`t2o=q`1djXC=Q6`3C${*>dm~I{ 
z(aQH&Qd{{X+&+-4{epSL;q%n$)NOQ7kM}ea9bA++*F+t$2$%F!U!U}(&y7Sd0jQMV zkOhuJ$+g7^kb<`jqFiq(y1-~JjP13J&uB=hfjH5yAArMZx?VzW1~>tln~d5pt$uWR~TM!lIg+D)prR zocU0N2}_WTYpU`@Bsi1z{$le`dO{-pHFQr{M}%iEkX@0fv!AGCTcB90@e|slf#unz z*w4Cf>(^XI64l|MmWih1g!kwMJiifdt4C<5BHtaS%Ra>~3IFwjdu;_v*7BL|fPu+c zNp687`{}e@|%)5g4U*i=0zlSWXzz=YcZ*&Bg zr$r(SH0V5a%oHh*t&0y%R8&jDI=6VTWS_kJ!^WN!ET@XfEHYG-T1jJsDd`yEgh!^* z+!P62=v`R2=TBVjt=h}|JIg7N^RevZuyxyS+jsk>=iLA52Ak+7L?2$ZDUaWdi1PgB z_;*Uae_n&7o27ewV*y(wwK~8~tU<#Np6UUIx}zW6fR&dKiPq|$A{BwG_-wVfkm+EP zxHU@m`im3cD#fH63>_X`Il-HjZN_hqOVMG;(#7RmI13D-s_>41l|vDH1BglPsNJ+p zTniY{Hwoief+h%C^|@Syep#722=wmcTR7awIzimAcye?@F~f|n<$%=rM+Jkz9m>PF70$)AK@|h_^(zn?!;={;9Zo7{ zBI7O?6!J2Ixxk;XzS~ScO9{K1U9swGvR_d+SkromF040|Slk%$)M;9O_8h0@WPe4= z%iWM^ust8w$(NhO)7*8uq+9CycO$3m-l}O70sBi<4=j0CeE_&3iRUWJkDM$FIfrkR zHG2|hVh3?Nt$fdI$W?<|Qq@#hjDijk@7eUr1&JHYI>(_Q4^3$+Zz&R)Z`WqhBIvjo zX#EbA8P0Qla-yACvt)%oAVHa#kZi3Y8|(IOp_Z6J-t{)98*OXQ#8^>vTENsV@(M}^ z(>8BXw`{+)BfyZB!&85hT0!$>7$uLgp9hP9M7v=5@H`atsri1^{1VDxDqizj46-2^ z?&eA9udH#BD|QY2B7Zr$l;NJ-$L!u8G{MZoX)~bua5J=0p_JnM`$(D4S!uF}4smWq zVo%kQ~C~X?cWCH zo4s#FqJ)k|D{c_ok+sZ8`m2#-Uk8*o)io`B+WTD0PDA!G`DjtibftJXhPVjLZj~g& z=MM9nF$7}xvILx}BhM;J-Xnz0=^m1N2`Mhn6@ct+-!ijIcgi6FZ*oIPH(tGYJ2EQ0 z{;cjcc>_GkAlWEZ2zZLA_oa-(vYBp7XLPbHCBcGH$K9AK6nx}}ya%QB2=r$A;11*~ z_wfru1SkIQ0&QUqd)%eAY^FL!G;t@7-prQ|drDn#yDf%Uz8&kGtrPxKv?*TqkC(}g zUx10<;3Vhnx{gpWXM8H zKc0kkM~gIAts$E!X-?3DWG&^knj4h(q5(L;V81VWyC@_71oIpXfsb0S(^Js#N_0E} zJ%|XX&EeVPyu}? zz~(%slTw+tcY3ZMG$+diC8zed=CTN}1fB`RXD_v2;{evY z@MCG$l9Az+F()8*SqFyrg3jrN7k^x3?;A?L&>y{ZUi$T8!F7Dv8s}}4r9+Wo0h^m= zAob@CnJ;IR-{|_D;_w)? 
zcH@~&V^(}Ag}%A90);X2AhDj(-YB>$>GrW1F4C*1S5`u@N{T|;pYX1;E?gtBbPvS* zlv3r#rw2KCmLqX0kGT8&%#A6Sc(S>apOHtfn+UdYiN4qPawcL{Sb$>&I)Ie>Xs~ej z7)a=-92!sv-A{-7sqiG-ysG0k&beq6^nX1L!Fs$JU#fsV*CbsZqBQ|y z{)}zvtEwO%(&mIG|L?qs2Ou1rqTZHV@H+sm8Nth(+#dp0DW4VXG;;tCh`{BpY)THY z_10NNWpJuzCG%Q@#Aj>!v7Eq8eI6_JK3g2CsB2jz)2^bWiM{&U8clnV7<2?Qx5*k_ zl9B$P@LV7Sani>Xum{^yJ6uYxM4UHnw4zbPdM|PeppudXe}+OcX z!nr!xaUA|xYtA~jE|436iL&L={H3e}H`M1;2|pLG)Z~~Ug9X%_#D!DW>w}Es!D{=4 zxRPBf5UWm2{}D>Em;v43miQ~2{>%>O*`wA{7j;yh;*DV=C-bs;3p{AD;>VPcn>E;V zLgtw|Y{|Beo+_ABz`lofH+cdf33LjIf!RdcW~wWgmsE%2yCQGbst4TS_t%6nS8a+m zFEr<|9TQzQC@<(yNN9GR4S$H-SA?xiLIK2O2>*w-?cdzNPsG4D3&%$QOK{w)@Dk}W z|3_Z>U`XBu7j6Vc=es(tz}c7k4al1$cqDW4a~|xgE9zPX(C`IsN(QwNomzsBOHqjd zi{D|jYSv5 zC>6#uB~%#!!*?zXW`!yHWjbjwm!#eo3hm;>nJ!<`ZkJamE6i>>WqkoTpbm(~b%G_v z`t3Z#ERips;EoA_0c?r@WjEP|ulD+hue5r8946Sd0kuBD$A!=dxigTZn)u3>U;Y8l zX9j(R*(;;i&HrB&M|Xnitzf@><3#)aKy=bFCf5Hz@_);{nlL?J!U>%fL$Fk~Ocs3& zB@-Ek%W>h9#$QIYg07&lS_CG3d~LrygXclO!Ws-|PxMsn@n{?77wCaq?uj`dd7lllDCGd?ed&%5k{RqUhiN1u&?uz@Fq zNkv_4xmFcl?vs>;emR1R<$tg;*Ayp@rl=ik z=x2Hk zJqsM%++e|*+#camAiem6f;3-khtIgjYmNL0x|Mz|y{r{6<@_&a7^1XDyE>v*uo!qF zBq^I8PiF#w<-lFvFx9xKoi&0j)4LX~rWsK$%3hr@ebDv^($$T^4m4h#Q-(u*Mbt6F zE%y0Fvozv=WAaTj6EWZ)cX{|9=AZDvPQuq>2fUkU(!j1GmdgeYLX`B0BbGK(331ME zu3yZ3jQ@2)WW5!C#~y}=q5Av=_;+hNi!%gmY;}~~e!S&&^{4eJuNQ2kud%Olf8TRI zW-Dze987Il<^!hCO{AR5tLW{F1WLuZ>nhPjke@CSnN zzoW{m!+PSCb7byUf-1b;`{0GU^zg7b9c!7ueJF`>L;|akVzb&IzoLNNEfxp7b7xMN zKs9QG6v@t7X)yYN9}3d4>*ROMiK-Ig8(Do$3UI&E}z!vcH2t(VIk-cLyC-Y%`)~>Ce23A=dQsc<( ziy;8MmHki+5-(CR8$=lRt{(9B9W59Pz|z0^;`C!q<^PyE$KXt!KibFH*xcB9V%xTD zn;YlZ*tTukwr$(mWMka@|8CW-J8!zCXI{P1-&=wSvZf&%9SZ7m`1&2^nV#D z6T*)`Mz3wGUC69Fg0Xk!hwY}ykk!TE%mr57TLX*U4ygwvM^!#G`HYKLIN>gT;?mo% zAxGgzSnm{}vRG}K)8n(XjG#d+IyAFnozhk|uwiey(p@ zu>j#n4C|Mhtd=0G?Qn5OGh{{^MWR)V*geNY8d)py)@5a85G&_&OSCx4ASW8g&AEXa zC}^ET`eORgG*$$Q1L=9_8MCUO4Mr^1IA{^nsB$>#Bi(vN$l8+p(U^0dvN_{Cu-UUm zQyJc!8>RWp;C3*2dGp49QVW`CRR@no(t+D|@nl138lu@%c1VCy3|v4VoKZ4AwnnjF 
z__8f$usTzF)TQ$sQ^|#(M}-#0^3Ag%A0%5vA=KK$37I`RY({kF-z$(P50pf3_20YTr%G@w+bxE_V+Tt^YHgrlu$#wjp7igF!=o8e2rqCs|>XM9+M7~TqI&fcx z=pcX6_MQQ{TIR6a0*~xdgFvs<2!yaA1F*4IZgI!)xnzJCwsG&EElg_IpFbrT}nr)UQy}GiK;( zDlG$cksync34R3J^FqJ=={_y9x_pcd%$B*u&vr7^ItxqWFIAkJgaAQiA)pioK1JQ| zYB_6IUKc$UM*~f9{Xzw*tY$pUglV*?BDQuhsca*Fx!sm`9y`V&?lVTH%%1eJ74#D_ z7W+@8@7LAu{aq)sPys{MM~;`k>T%-wPA)E2QH7(Z4XEUrQ5YstG`Uf@w{n_Oc!wem z7=8z;k$N{T74B*zVyJI~4d60M09FYG`33;Wxh=^Ixhs69U_SG_deO~_OUO1s9K-8p z5{HmcXAaKqHrQ@(t?d@;63;Pnj2Kk<;Hx=kr>*Ko`F*l){%GVDj5nkohSU)B&5Vrc zo0u%|b%|VITSB)BXTRPQC=Bv=qplloSI#iKV#~z#t#q*jcS`3s&w-z^m--CYDI7n2 z%{LHFZ*(1u4DvhES|Dc*n%JL8%8?h7boNf|qxl8D)np@5t~VORwQn)TuSI07b-T=_ zo8qh+0yf|-6=x;Ra$w&WeVZhUO%3v6Ni*}i&sby3s_(?l5Er{K9%0_dE<`7^>8mLr zZ|~l#Bi@5}8{iZ$(d9)!`}@2~#sA~?uH|EbrJQcTw|ssG)MSJJIF96-_gf&* zy~I&$m6e0nnLz^M2;G|IeUk?s+afSZ){10*P~9W%RtYeSg{Nv5FG<2QaWpj?d`;}<4( z>V1i|wNTpH`jJtvTD0C3CTws410U9HS_%Ti2HaB~%^h6{+$@5`K9}T=eQL;dMZ?=Y zX^z?B3ZU_!E^OW%Z*-+t&B-(kLmDwikb9+F9bj;NFq-XHRB=+L)Rew{w|7p~7ph{#fRT}}K zWA)F7;kJBCk^aFILnkV^EMs=B~#qh*RG2&@F|x2$?7QTX_T6qL?i$c6J*-cNQC~E6dro zR)CGIoz;~V?=>;(NF4dihkz~Koqu}VNPE9^R{L@e6WkL{fK84H?C*uvKkO(!H-&y( zq|@B~juu*x#J_i3gBrS0*5U*%NDg+Ur9euL*5QaF^?-pxxieMM6k_xAP;S}sfKmIa zj(T6o{4RfARHz25YWzv=QaJ4P!O$LHE(L~6fB89$`6+olZR!#%y?_v+Cf+g)5#!ZM zkabT-y%v|ihYuV}Y%-B%pxL264?K%CXlbd_s<GY5BG*`kYQjao$QHiC_qPk5uE~AO+F=eOtTWJ1vm*cU(D5kvs3kity z$IYG{$L<8|&I>|WwpCWo5K3!On`)9PIx(uWAq>bSQTvSW`NqgprBIuV^V>C~?+d(w$ZXb39Vs`R=BX;4HISfN^qW!{4 z^amy@Nqw6oqqobiNlxzxU*z2>2Q;9$Cr{K;*&l!;Y??vi^)G|tefJG9utf|~4xh=r3UjmRlADyLC*i`r+m;$7?7*bL!oR4=yU<8<-3XVA z%sAb`xe&4RV(2vj+1*ktLs<&m~mGJ@RuJ)1c zLxZyjg~*PfOeAm8R>7e&#FXBsfU_?azU=uxBm=E6z7FSr7J>{XY z1qUT>dh`X(zHRML_H-7He^P_?148AkDqrb>;~1M-k+xHVy>;D7p!z=XBgxMGQX2{* z-xMCOwS33&K^~3%#k`eIjKWvNe1f3y#}U4;J+#-{;=Xne^6+eH@eGJK#i|`~dgV5S zdn%`RHBsC!=9Q=&=wNbV#pDv6rgl?k1wM03*mN`dQBT4K%uRoyoH{e=ZL5E*`~X|T zbKG9aWI}7NGTQtjc3BYDTY3LbkgBNSHG$5xVx8gc@dEuJqT~QPBD=Scf53#kZzZ6W zM^$vkvMx+-0$6R^{{hZ2qLju~e85Em>1nDcRN3-Mm7x;87W#@RSIW9G>TT6Q{4e~b 
z8DN%n83FvXWdpr|I_8TaMv~MCqq0TA{AXYO-(~l=ug42gpMUvOjG_pWSEdDJ2Bxqz z!em;9=7y3HW*XUtK+M^)fycd8A6Q@B<4biGAR)r%gQf>lWI%WmMbij;un)qhk$bff zQxb{&L;`-1uvaCE7Fm*83^0;!QA5-zeSvKY}WjbwE68)jqnOmj^CTBHaD zvK6}Mc$a39b~Y(AoS|$%ePoHgMjIIux?;*;=Y|3zyfo)^fM=1GBbn7NCuKSxp1J|z zC>n4!X_w*R8es1ofcPrD>%e=E*@^)7gc?+JC@mJAYsXP;10~gZv0!Egi~){3mjVzs z^PrgddFewu>Ax_G&tj-!L=TuRl0FAh#X0gtQE#~}(dSyPO=@7yd zNC6l_?zs_u5&x8O zQ|_JvKf!WHf43F0R%NQwGQi-Dy7~PGZ@KRKMp?kxlaLAV=X{UkKgaTu2!qzPi8aJ z-;n$}unR?%uzCkMHwb56T%IUV)h>qS(XiuRLh3fdlr!Cri|{fZf0x9GVYUOlsKgxLA7vHrkpQddcSsg4JfibzpB zwR!vYiL)7%u8JG7^x@^px(t-c_Xt|9Dm)C@_zGeW_3nMLZBA*9*!fLTV$Uf1a0rDt zJI@Z6pdB9J(a|&T_&AocM2WLNB;fpLnlOFtC9yE6cb39?*1@wy8UgruTtX?@=<6YW zF%82|(F7ANWQ`#HPyPqG6~ggFlhJW#R>%p@fzrpL^K)Kbwj(@#7s97r`)iJ{&-ToR z$7(mQI@~;lwY+8dSKP~0G|#sjL2lS0LQP3Oe=>#NZ|JKKYd6s6qwe#_6Xz_^L4PJ5TM_|#&~zy= zabr|kkr3Osj;bPz`B0s;c&kzzQ2C8|tC9tz;es~zr{hom8bT?t$c|t;M0t2F{xI;G z`0`ADc_nJSdT`#PYCWu4R0Rmbk#PARx(NBfdU>8wxzE(`jA}atMEsaG6zy8^^nCu| z9_tLj90r-&Xc~+p%1vyt>=q_hQsDYB&-hPj(-OGxFpesWm;A(Lh>UWy4SH9&+mB(A z2jkTQ2C&o(Q4wC_>|c()M8_kF?qKhNB+PW6__;U+?ZUoDp2GNr<|*j(CC*#v0{L2E zgVBw6|3c(~V4N*WgJsO(I3o>8)EO5;p7Xg8yU&%rZ3QSRB6Ig6MK7Wn5r+xo2V}fM z0QpfDB9^xJEi}W*Fv6>=p4%@eP`K5k%kCE0YF2Eu5L!DM1ZY7wh`kghC^NwxrL}90dRXjQx=H>8 zOWP@<+C!tcw8EL8aCt9{|4aT+x|70i6m*LP*lhp;kGr5f#OwRy`(60LK@rd=to5yk^%N z6MTSk)7)#!cGDV@pbQ>$N8i2rAD$f{8T{QM+|gaj^sBt%24UJGF4ufrG1_Ag$Rn?c zzICg9`ICT>9N_2vqvVG#_lf9IEd%G5gJ_!j)1X#d^KUJBkE9?|K03AEe zo>5Rql|WuUU=LhLRkd&0rH4#!!>sMg@4Wr=z2|}dpOa`4c;_DqN{3Pj`AgSnc;h%# z{ny1lK%7?@rwZO(ZACq#8mL)|vy8tO0d1^4l;^e?hU+zuH%-8Y^5YqM9}sRzr-XC0 zPzY1l($LC-yyy*1@eoEANoTLQAZ2lVto2r7$|?;PPQX`}rbxPDH-a$8ez@J#v0R5n z7P*qT3aHj02*cK)WzZmoXkw?e3XNu&DkElGZ0Nk~wBti%yLh+l2DYx&U1lD_NW_Yt zGN>yOF?u%ksMW?^+~2&p@NoPzk`T)8qifG_owD>@iwI3@u^Y;Mqaa!2DGUKi{?U3d z|Efe=CBc!_ZDoa~LzZr}%;J|I$dntN24m4|1(#&Tw0R}lP`a`?uT;>szf^0mDJx3u z6IJvpeOpS$OV!Xw21p>Xu~MZ(Nas5Iim-#QSLIYSNhYgx1V!AR>b zf5b7O`ITTvW5z%X8|7>&BeEs8~J1i47l;`7Y#MUMReQ4z!IL1rh8UauKNPG?7rV_;#Y 
zG*6Vrt^SsTMOpV7mkui}l_S8UNOBcYi+DzcMF>YKrs3*(q5fwVCr;_zO?gpGx*@%O zl`KOwYMSUs4e&}eM#FhB3(RIDJ9ZRn6NN{2Nf+ z2jcz%-u6IPq{n7N3wLH{9c+}4G(NyZa`UmDr5c-SPgj0Sy$VN#Vxxr;kF>-P;5k!w zuAdrP(H+v{Dybn78xM6^*Ym@UGxx?L)m}WY#R>6M2zXnPL_M9#h($ECz^+(4HmKN7 zA>E;`AEqouHJd7pegrq4zkk>kHh`TEb`^(_ea;v{?MW3Sr^FXegkqAQPM-h^)$#Jn z?bKbnXR@k~%*?q`TPL=sD8C+n^I#08(}d$H(@Y;3*{~nv4RLZLw`v=1M0-%j>CtT( zTp#U03GAv{RFAtj4vln4#E4eLOvt zs;=`m&{S@AJbcl1q^39VOtmN^Zm(*x(`(SUgF(=6#&^7oA8T_ojX>V5sJx@*cV|29 z)6_%P6}e}`58Sd;LY2cWv~w}fer&_c1&mlY0`YNNk9q=TRg@Khc5E$N`aYng=!afD z@ewAv^jl$`U5;q4OxFM4ab%X_Jv>V!98w$8ZN*`D-)0S7Y^6xW$pQ%g3_lEmW9Ef^ zGmFsQw`E!ATjDvy@%mdcqrD-uiKB}!)ZRwpZRmyu+x|RUXS+oQ*_jIZKAD~U=3B|t zz>9QQr91qJihg9j9rWHww{v@+SYBzCfc0kI=4Gr{ZLcC~mft^EkJ`CMl?8fZ z3G4ix71=2dQ`5QuTOYA0(}f`@`@U<#K?1TI(XO9c*()q!Hf}JUCaUmg#y?ffT9w1g zc)e=JcF-9J`hK{0##K#A>m^@ZFx!$g09WSBdc8O^IdP&JE@O{i0&G!Ztvt{L4q%x& zGE2s!RVi6ZN9)E*(c33HuMf7#X2*VPVThdmrVz-Fyqxcs&aI4DvP#bfW={h$9>K0HsBTUf z2&!G;( z^oOVIYJv~OM=-i`6=r4Z1*hC8Fcf3rI9?;a_rL*nr@zxwKNlxf(-#Kgn@C~4?BdKk zYvL?QcQeDwwR5_S(`sn&{PL6FYxwb-qSh_rUUo{Yi-GZz5rZotG4R<+!PfsGg`MVtomw z5kzOZJrh(#rMR_87KeP0Q=#^5~r_?y1*kN?3Fq% zvnzHw$r!w|Soxz8Nbx2d&{!#w$^Hua%fx!xUbc2SI-<{h>e2I;$rJL)4)hnT5cx^* zIq#+{3;Leun3Xo=C(XVjt_z)F#PIoAw%SqJ=~DMQeB zNWQ={d|1qtlDS3xFik}#j*8%DG0<^6fW~|NGL#P_weHnJ(cYEdJtI9#1-Pa8M}(r{ zwnPJB_qB?IqZw5h!hRwW2WIEb?&F<52Ruxpr77O2K>=t*3&Z@=5(c^Uy&JSph}{Q^ z0Tl|}gt=&vK;Rb9Tx{{jUvhtmF>;~k$8T7kp;EV`C!~FKW|r$n^d6=thh`)^uYgBd zydgnY9&mm$?B@pKK+_QreOm?wnl5l}-wA$RZCZukfC$slxbqv9uKq0o^QeSID96{Rm^084kZ)*`P zk))V~+<4-_7d6<~)PL%!+%JP`Dn23vUpH47h~xnA=B_a}rLy|7U-f0W+fH`{wnyh2 zD$JYdXuygeP5&OAqpl2)BZ|X){~G;E|7{liYf%AZFmXXyA@32qLA)tuuQz`n^iH1Y z=)pAzxK$jw0Xq?7`M`=kN2WeQFhz)p;QhjbKg#SB zP~_Vqo0SGbc5Q;v4Q7vm6_#iT+p9B>%{s`8H}r|hAL5I8Q|ceJAL*eruzD8~_m>fg26HvLpik&#{3Zd#|1C_>l&-RW2nBBzSO zQ3%G{nI*T}jBjr%3fjG*&G#ruH^ioDM>0 zb0vSM8ML?tPU*y%aoCq;V%x%~!W*HaebuDn9qeT*vk0%X>fq-4zrrQf{Uq5zI1rEy zjQ@V|Cp~$AoBu=VgnVl@Yiro>ZF{uB=5)~i1rZzmDTIzLBy`8Too!#Z4nE$Z{~uB( 
z_=o=gKuhVpy&`}-c&f%**M&(|;2iy+nZy2Su}GOAH_GT9z`!ogwn$+Bi&1ZhtPF zVS&LO5#Bq}cew$kvE7*t8W^{{7&7WaF{upy0mj*K&xbnXvSP9V$6m6cesHGC!&Us36ld9f*Pn8gbJb3`PPT|ZG zri2?uIu09i>6Y-0-8sREOU?WaGke0+rHPb^sp;*E{Z5P7kFJ@RiLZTO`cN2mRR#Nz zxjJ##Nk+Uy-2N-8K_@576L(kJ>$UhP+)|w!SQHkkz+e62*hpzyfmY4eQLZtZUhEdG zIZluDOoPDlt5#iw+2epC3vEATfok^?SDT`TzBwtgKjY z>ZImbO)i~T=IYAfw$3j2mF1Cj*_yqK(qw(U^r-!gcUKvWQrDG@E{lEyWDWOPtA9v{ z5($&mxw{nZWo_Ov??S#Bo1;+YwVfx%M23|o$24Hdf^&4hQeV=Cffa5MMYOu2NZLSC zQ4UxWvn+8%YVGDg(Y*1iHbUyT^=gP*COcE~QkU|&6_3h z-GOS6-@o9+Vd(D7x#NYt{Bvx2`P&ZuCx#^l0bR89Hr6Vm<||c3Waq(KO0eZ zH(|B;X}{FaZ8_4yyWLdK!G_q9AYZcoOY}Jlf3R;%oR5dwR(rk7NqyF%{r>F4s^>li z`R~-fh>YIAC1?%!O?mxLx!dq*=%IRCj;vXX628aZ;+^M0CDFUY0Rc<1P5e(OVX8n- z*1UOrX{J}b2N)6m5&_xw^WSN=Lp$I$T>f8K6|J_bj%ZsIYKNs1$TFt!RuCWF48;98`7D(XPVnk+~~i=U$} zR#;!ZRo4eVqlDxjDeE^3+8)bzG_o~VRwdxqvD^HNh#@o>1My$0*Y_`wfQ$y}az|Uz zM47oEaYNTH?J^w9EVNnvfmmbV+GHDe)Kf;$^@6?9DrSHnk@*{PuJ>ra|9KO!qQ-Fp zNNcZB4ZdAI>jEh@3Mt(E1Fy!^gH-Zx6&lr8%=duIgI^~gC{Q;4yoe;#F7B`w9daIe z{(I;y)=)anc;C;)#P`8H6~iAG_q-4rPJb(6rn4pjclGi6$_L79sFAj#CTv;t@94S6 zz`Id7?k!#3JItckcwOf?sj=Xr6oKvAyt1=jiWN@XBFoW6dw_+c9O9x2i4or?*~8f& zm<>yzc6Aw_E-gsGAa`6`cjK~k^TJt(^`E1^_h)5(8)1kzAsBxjd4+!hJ&&T!qklDN z`?j#za=(^wRCvEI75uE^K#IBe5!5g2XW}|lUqAmdmIQb7xJtP}G9^(=!V`ZS_7#RZ zjXq#Cekw>fE*YS-?Qea|7~H?)bbLK;G&(~%!B@H`o#LYAuu6;-c~jFfjY7GKZ|9~{ zE!`!d@@rhY_@5fDbuQ8gRI~R_vs4%fR5$?yot4hDPJ28k_Wzmc^0yzwMr#*(OXq@g zRUgQmJA?E>3GO=5N8iWIfBP{&QM%!Oa*iwTlbd0Fbm*QCX>oRb*2XfG-=Bz1Qz0$v zn#X!2C!LqE601LEMq;X7`P*5nurdKZAmmsI-zZ|rTH;AFxNDyZ_#hN2m4W(|YB64E z470#yh$;8QzsdA;6vbNvc95HLvZvyT4{C>F(fwy&izvNDuvfO1Z;`Ss#4a_c6pm*{0t|_i9z{@84^lffQa5zG4<{(+p5-S z^>lG-^GJR#V>;5f3~y%n=`U_jBp~WgB0cp;Lx5VZYPYCH&(evw#}AYRlGJ>vcoeVr z3%#-QUBgeH!GB>XLw;rT&oMI9ynP;leDwh4O2uM!oIWo&Qxk{^9#nX&^3GJ z(U~5{S9aw@yHH^yuQGso=~*JOC9Zdi6(TFP+IddkfK5Eu9q;+F9?PPNAe-O;;P_Aa zPJ{Dqa1gQb%dZ|0I{#B0(z|r(qq!A4CxlW92-LwXFjYfOzAT1DDK`9rm4AB~l&oVv zi6_{)M9L1%JP}i52y@`!T9RB~!CRel53wl?amNHqcuElq%hn)|#BPvW5_m51RVb|? 
zXQ&B*eAD}}QamG>o{?i~usG5X6IDa3+Xkb8w%7;C8|Cln70biA+ZH}fxkH^Wei$vZPnuqIT!Mmy26;mLfU z3Bbv4M^vvMlz-I+46=g>0^wWkmA!hlYj*I!%it^x9Kx(d{L|+L{rW?Y#hLHWJfd5X z>B=Swk8=;mRtIz}Hr3NE_garb5W*!7fnNM{+m2_>!cHZZlNEeof~7M#FBEQ+f&gJ3 z^zv*t?XV)jQi%0-Ra|ISiW-fx)DsK-> zI}Fv%uee$#-1PKJwr=lU89eh=M{>Nk7IlJ)U33U)lLW+OOU%A|9-Lf;`@c*+vX{W2 z{{?0QoP!#?8=5%yL=fP%iF+?n$0#iHz`P;1{Ra6iwr=V7v^8;NoLJ5)QxIyIx>ur?lMwV=mBo0BA?28kMow8SX=Ax5L%S~x4+EQi#Ig`(ht%)D(F#Pa!)SiHy&PvUp32=VtAsR|6|NZR@jkad zX^aEgojf9(-)rNOZ=NVA&a;6Cljkb=H-bY9m^_I)`pBHB16QW)sU27zF13ypefeATJc1Wzy39GrKF{UntHsIU59AdXp?j{eh2R)IbU&omd zk6(qzvE@hve1yM6dgkbz>5HDR&MD~yi$yymQ}?b;RfL$N-#l7(u?T^Wlu+Q;fo|jd zBe^jzGMHY(2=5l?bEIh+zgE$1TEQ&!p3fH;AW`P?W5Hkj3eJnT>dqg! zf~}A*SZU5HHDCbdywQ^l_PqssHRlrySYN=`hAv2sVrtcF!`kyEu%XeeRUTJU7vB%h zY0*)N$mLo6d=tJfe}IPIeiH~>AKwCpkn&WEfYgl?3anq5#-F$6$v-(G_j0*S9mdsn zg@ek_ut4(?+JP_9-n`YqoD(gAz+Ttm1#t za96D}oQR(o=e8wwes19_(p4g(A1vSGwPAp~Hh3hh!fc>u{1E^+^}AzwilFVf6^vbL zc&NnRs`u)N-P|Cu4()yTiuE{j_V&=K?iP!IUBf~ei2}~_KBvUAlXa;R#Wl`gOBtJ$Y5(L))@`riLB)v*r>9*8VfmQt<72?+fdwP{BA@?_qo>mN7yzICUCaeG(+>Rb~8wg~6U(P)NlDLuhQgjbC}=)HuZgC}0Z-qLX4lJ7^)8~!!*qP0=~`Y_(A z{@15*ZevZSI^s|OnpCeCwLXf#tgbq8y~R*GB5anmZ;_N!+-3>!wu@NBFCNJ$#y?{? zMI!?s*=_xA;V&aX)ROxzVW8*de+&P#2zucA|8mksdgCXBsZ*TM=%{L1Tk5LB_*^@&S?O=ot{h)1xRVSn27&Tk8>rF|6ruzYb;Nq) z;qvlmrP^SL$mhe4Ai)xpl6Wx&y;z8o!7-+6$qj;ZLXvfR71I@w(R|6lyuP6v-lP&r z@KK-TEmGQfMmk1c0^fd7!^si}T%b5a2%>T-Drh|^Cf z$}qxIv@zxbmJ#qjK6Q_aGDe{ciVT20V1lW52Xs!}x(4_j)sUXYdm4 zwYC9FOa;X*c*LxL;xE5ov?|?^7gWXyALy_D2GvDo-8%0-Y%9TkkO_Tcr2qIUg3(OC z%3wt?hyn*+e^z%(~2#!2dvMFa$mzgwk1I1X;naFMjXSbnmZ!zd%7u)=cgi z*0&@Scrl&BDfU(9Pks8#;!~v~r7~DN{G6WE&_;7i{{a*?oiCao(l%2ruxX0fAt69e2vLgL%Mf_)!*(Tz zNKW>sW@YB2vBfP>C&L|-pq)Uq^PsG_THu;8iEcqafO?0k$IQp1KyWyOoTxwmKvlc^ zO9$%Tt8;%qQxwy5;CsJ)V}a7I6}SvQ%0_H53Kcqx=m83fIzpLSGgfVe^SPdc*xPdciI5dg}#{Etv$e<)gGD=qm0v=!aN@*?$s zLhzD%4w{vf-g6FHQjG9XyC+4=bewb?Mz%!u8%oP{G9{UJFTLTcCi3R(=Nm&t&Sl(? 
zr>pj?=ECdDVa}-g%`LF^1EY@>7d}%VhYpKFSDPH)D(zB+gPe1m7E}W>TiW=8L0&(D&YG=0<&7G4Bu{;-#Ud;-1%Ta9V}U6fyK1YX z`Rq|i-X(loPZ)M$H%m@j7bGx>uj~y=0)!t#dc|c}+hT%~Sq>fefez0Ul|jOJHta~u zx7*mV6~Jpt(FkY(pQN91>aFk7VS%Sa^oLaq$*)W?fy`xuFJgH<2s=!Rz}_(qdmdF~ zlr2f=)q_vpi8X;Jq>5^$GweJ{iS`Khw2f)fsvKpgh;U~13a+9 zfaw}UuGiBy;q10pI^Avb#X3D=k_r(T{N;-xA)OM}2Py5L##<96NU*Sr7GQqhfrPej z?;B$Bt_sTxuSAPXfTSC{zr?@$$0iHxC@z*5F52j*PG87hh`0w3At8jPf*rjNE~_Gj z2)fjeUFJ(#l9uWuw&5#@13|AQ1;pdA?EL4YKq0JDR5T8I?aWGxI=J9}vdyH;gQ@iE z>+UnC2iwT0f80-VuE^bY!N@(}9?bOXyy%rTqSNDN4rO4Zt#(kZwcGgTp&3((F+nsd ze~B)%K6oP4WX_w1>|QImC;9q zy}4p+s%^Too2(gE>yo%+yY#F{)phtmNqsJPVQQ0lGR|H9q>aA&AtU4M+EZ%`xvQLb zbigBOc`dL}&j3er?EOI`!W)N#>+uwp_!h^5FspaEylq!e(FPY-6T3~WeNmZ<$?Y6y z-!bM1kD7ZF8xl+Pi6fiv1?)q%`aNxn#pK%)ct||L&Xnf8Gu&3g;Of{B8Pt=u`e+Mn zA(DmU#3cF#Nr7W;X0V4ksFHMcNDAf4G&D8VjLeZ^|5-f$>_|71>P3xuu)?4NJed*w z6GR_RB5HQLzT(h+`Y?-3esxeue{-Q%b+!&o>IJ!#=}#_&q+hwJga>fkt(*(WdoN5vSta z#$mMN6}YzYRpaBZ)j)EL91-oL1(|d(>%UclsTUOyXyWM&(hNqLwqtn`!E>HJM{ zh>M~xa1@*U^cwx-k5QjePr5=B6u*jpJ)C0{C?f7Yga+I^4$TleyX$x&jm9z@c!?cC z<2kY7)p^+W{AXd@l1C09_yB*TG|yzb96BYk z8Wpj81vB>zcR+qM4m~A44w1n7$fxB$-?MV}S?Fh}c_|2FXg`cZ?750i;Cdl-_nGK# zta)h)6!*AsQ-z8caSh)%5JY>_yCeJs~FpAzdY8 zF@SU_hN#~ip5I;UACFzx1v0yf{j97l&)e-=`d#1Kp6A(Kj&HC!%vK!wEdK3HFJ?|6 za;WwUczZ+&<$g!Td^48@lJtfW@doXL#jY6)dK_RDCQAZ}l&OdD+?Yl5-bqpsHZR^( zF{u_cR(x>u(c4i5f(^8!h6CV0#ZxRFhLlunWiGDLO6yoRb(wV<(P^8=fOU7Hp{AHE z;Yg%kg@6&tL3Z*IrbkDeQ$%rbalVP39D@LVrC2xSavnTp%PorXPf1DVzHyqjDsDnS zL=mv0a2s60bHKGQM)ue>npH0SCp;XtZFUzm?R-x7D*(PxMmuJ4J*K2eY&ebe0yQHe zVG&*qe{pot{PM^xQv`H_rn2FcYOrEN+I#uX^1`Id%J$;Hi2cNCU!0Hlc0TjxLzkss zHxmC;hQBu5U4J0XflWM;{uH`_47Sg)QyZ{8D&T0;bdc3{^^<=q7P?C_2E-}PQn>*= z2T5q^J|Q_2+x%Qt`i3m6=6V$)BxIx{2KAFkMb#q`iMCD|L>+}_dYVA$wBr1Zr}YOF z^MMGO@PHGGh>g|^yF`PvvtDwN@kxt?ClLcG<+murHMz1Asj!$l=b)4{d}SqOJ}>Y< zSeAyP@ZEcpx`ayIdp>{--UVLYC_cZZURh_!4u2(*#x@Tk(QJa}4BqqZ$6%LhF-HB~ zAcc?$I6KP}IxANcAteEBX$Ys?T=JB|Fnd3*UAO0mYAXCgWf~?7Z_G7G5`H4;S^QKK 
zG*2l75vI@DHQC*es>6&|r^#RHKRQ5rwv_l4`!(!I3%)Z$P1fnZ8N@27zyg}54ElO%SjQ_4uujX)4ta@Gz2)_>4b~vX|rhRIH-eqdD zL)xaEpW3K|a>daQRRR*_$W>rWOsW-IE4VQl3L$3}=-PFU)s@XG&9+DFivH-;2&w~$ES_nJZJH!?1mO!CnP)Jb{mW9=f`bDpo^PI6i4|YurK)Q1 z^Ys1oHRdr!$X4RuyR%kgp!a*Lz*_AAoJ$EVAdsNCoPA^VZE1pGO@D3UStACE+%vs6 z$io@E>DmB|3VV~GbOt2oc+K;t zdn3gaFvYz;vRN-+2+Qk{8|O}e86nVck)fZn3sg$j#dLVham{yGkc$I#!HF7mRS%f* z!+NdzG49K(qaO^SBlp@K@D?|^rAq;8{*@kRc4sYSNQmoy7@_RS_ksWl2T_38h2A)# ziU2WXWD03(NqS&Mu*?0-iK8X_Z3w`}c7MPv0qZ7iM|L3xdTnR{y!7{#82$}uJCiGT zqa=8<9L05hu6 z1N+2n7OzT{NEf?gS@eq7@buCDFe9mAxY%THo^b@BHckKK>jg6{@)>n z43cPs%$Qi0iwyZ+{C491>FRu5+6baJ{&XXXC@Sp+b!QE|{7_d?lm5K=B z)myKEcxjFm74+drF|JCYcxdY%ASig#YoRBRUV7An7f-%rqj%PHECbxh#5476cEq@NQL?dI6gUqvS@w zq!WmD(aR0{NxItAZCKDCVw=Zu{9WGDu^i?2g zLerPiOU*HSaXg^3CdOX^F6c9MiHINP339N%)a96`^Z-c#&EogcxMSYo0Cb4{-}q1( zRrJine`P|6WRkm8u4Ja1QRYq$AR>b7tugd#EsT-VmXN-t!TYjZy}i!uKi6$u>EJ?w zvdHZg+hp+5ree?>fdJAX)5#Wtm#2M-{~2jfX2{G`)?D6UD1MevdeeU;;HCi}AtJr( SGW6ptSs!X7{rG*o_g?|vpSEZK diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 865f1ba80d1e6..fcbbad6dd644c 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ 
b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d -distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip +distributionSha256Sum=194717442575a6f96e1c1befa2c30e9a4fc90f701d7aee33eb879b79e7ff05c0 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/x-pack/plugin/ent-search/build.gradle b/x-pack/plugin/ent-search/build.gradle index 4551011b03ca1..efd33cd163fdc 100644 --- a/x-pack/plugin/ent-search/build.gradle +++ b/x-pack/plugin/ent-search/build.gradle @@ -34,8 +34,6 @@ dependencies { javaRestTestImplementation(project(path: xpackModule('core'))) javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation(project(':modules:lang-mustache')) - - module ':modules:search-business-rules' } testClusters.configureEach { From d6d1edd52959f358ce1df438d01ddf03ef8b5c21 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 25 Mar 2024 09:22:20 -0400 Subject: [PATCH 152/214] ESQL: Fix typo in docs readme s/and/are/ --- docs/reference/esql/functions/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index fd310ebacfe7e..0f0f3b6e3cbb8 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -1,4 +1,4 @@ -The files in these subdirectories and generated by ESQL's test suite: +The files in these subdirectories are generated by ESQL's test suite: * `description` - description of each function scraped from `@FunctionInfo#description` * `examples` - examples of each function scraped from `@FunctionInfo#examples` * `parameters` - description of each function's parameters scraped from 
`@Param` From 8010b4e8e5dd1a78db6350248f68eee19f92422c Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 25 Mar 2024 14:45:13 +0100 Subject: [PATCH 153/214] SharedBlobCacheService.maybeFetchRegion should use computeCacheFileRegionSize (#106685) This method computes the exact ranges to fetch using the length of the blob, but that does not work for SharedBlobCacheService implementations that use a specific computeCacheFileRegionSize which is not based on blob length. --- docs/changelog/106685.yaml | 5 +++++ .../blobcache/shared/SharedBlobCacheService.java | 6 +----- 2 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/106685.yaml diff --git a/docs/changelog/106685.yaml b/docs/changelog/106685.yaml new file mode 100644 index 0000000000000..ed4a16ba0666c --- /dev/null +++ b/docs/changelog/106685.yaml @@ -0,0 +1,5 @@ +pr: 106685 +summary: '`SharedBlobCacheService.maybeFetchRegion` should use `computeCacheFileRegionSize`' +area: Snapshot/Restore +type: bug +issues: [] diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 934aeef26843f..0d51a4271e85b 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -561,12 +561,8 @@ public void maybeFetchRegion( listener.onResponse(false); return; } - long regionLength = regionSize; try { - if (region == getEndingRegion(blobLength)) { - regionLength = blobLength - getRegionStart(region); - } - ByteRange regionRange = ByteRange.of(0, regionLength); + ByteRange regionRange = ByteRange.of(0, computeCacheFileRegionSize(blobLength, region)); if (regionRange.isEmpty()) { listener.onResponse(false); return; From 1cc61107221a71890ad534567e31d4ef6007eb99 Mon Sep 17 
00:00:00 2001 From: Simon Cooper Date: Mon, 25 Mar 2024 13:50:32 +0000 Subject: [PATCH 154/214] Add isPatchFrom method to check for future patch versions (#106712) --- .../org/elasticsearch/TransportVersion.java | 24 +++++++++++++++++++ .../elasticsearch/TransportVersionTests.java | 12 ++++++++++ 2 files changed, 36 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 22e02652e9f68..48970b97e480e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -101,6 +101,30 @@ public static TransportVersion fromString(String str) { return TransportVersion.fromId(Integer.parseInt(str)); } + /** + * Returns {@code true} if this version is a patch version at or after {@code version}. + *

      + * This should not be used normally. It is used for matching patch versions of the same base version, + * using the standard version number format specified in {@link TransportVersions}. + * When a patch version of an existing transport version is created, {@code transportVersion.isPatchFrom(patchVersion)} + * will match any transport version at or above {@code patchVersion} that is also of the same base version. + *

      + * For example, {@code version.isPatchFrom(8_800_00_4)} will return the following for the given {@code version}: + *

        + *
      • {@code 8_799_00_0.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      • {@code 8_799_00_9.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      • {@code 8_800_00_0.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      • {@code 8_800_00_3.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      • {@code 8_800_00_4.isPatchFrom(8_800_00_4)}: {@code true}
      • + *
      • {@code 8_800_00_9.isPatchFrom(8_800_00_4)}: {@code true}
      • + *
      • {@code 8_800_01_0.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      • {@code 8_801_00_0.isPatchFrom(8_800_00_4)}: {@code false}
      • + *
      + */ + public boolean isPatchFrom(TransportVersion version) { + return onOrAfter(version) && id < version.id + 10 - (version.id % 10); + } + /** * Returns a string representing the Elasticsearch release version of this transport version, * if applicable for this deployment, otherwise the raw version number. diff --git a/server/src/test/java/org/elasticsearch/TransportVersionTests.java b/server/src/test/java/org/elasticsearch/TransportVersionTests.java index b8b8380ee4a96..2de973622248b 100644 --- a/server/src/test/java/org/elasticsearch/TransportVersionTests.java +++ b/server/src/test/java/org/elasticsearch/TransportVersionTests.java @@ -159,6 +159,18 @@ public void testMax() { } } + public void testIsPatchFrom() { + TransportVersion patchVersion = TransportVersion.fromId(8_800_00_4); + assertThat(TransportVersion.fromId(8_799_00_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_799_00_9).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_3).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_800_00_4).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_00_9).isPatchFrom(patchVersion), is(true)); + assertThat(TransportVersion.fromId(8_800_01_0).isPatchFrom(patchVersion), is(false)); + assertThat(TransportVersion.fromId(8_801_00_0).isPatchFrom(patchVersion), is(false)); + } + public void testVersionConstantPresent() { Set ignore = Set.of(TransportVersions.ZERO, TransportVersion.current(), TransportVersions.MINIMUM_COMPATIBLE); assertThat(TransportVersion.current(), sameInstance(TransportVersion.fromId(TransportVersion.current().id()))); From d44b9b64844a6ff6928f8b9f10803340d8cb3184 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Mon, 25 Mar 2024 15:08:54 +0100 Subject: [PATCH 155/214] Test knn with qvb assert ids and result size (#106686) yamlRestTest 
for knn with query_vector_builder assert document ids directly and check result size --- .../ml/search_knn_query_vector_builder.yml | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml index 50f687f704994..4cab2c7908748 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml @@ -75,17 +75,17 @@ setup: index: index-with-embedded-text refresh: true body: | - {"index": {}} + {"index": {"_id": "0"}} {"source_text": "the octopus comforter smells", "embedding":[0.3925197124481201, 0.9145996570587158, 0.01372915506362915, 0.9987854957580566, 0.3240084648132324, 0.6188188195228577, 0.926924467086792, 0.12143599987030029, 0.175662100315094, 0.16076070070266724, 0.7671306133270264, 0.9518267512321472, 0.4557478427886963, 0.5410670638084412, 0.7594802975654602, 0.5035953521728516, 0.4115469455718994, 0.038427770137786865, 0.5419668555259705, 0.6362232565879822, 0.17554593086242676, 0.01821446418762207, 0.2931918501853943, 0.294437050819397, 0.6901726722717285, 0.1679999828338623, 0.7995195984840393, 0.8781598210334778, 0.18507736921310425, 0.8614458441734314, 0.690071702003479, 0.7859554886817932, 0.803643524646759, 0.0048452019691467285, 0.19700628519058228, 0.22210919857025146, 0.7043975591659546, 0.6320799589157104, 0.542057991027832, 0.8704766035079956, 0.32195907831192017, 0.7272325158119202, 0.4066658020019531, 0.89588862657547, 0.7947880029678345, 0.06543421745300293, 0.2873639464378357, 0.8773637413978577, 0.36480581760406494, 0.692948043346405, 0.19171112775802612, 0.14275449514389038, 0.17054951190948486, 0.8969640135765076, 0.39838290214538574, 0.26756417751312256, 
0.5369327664375305, 0.4736328721046448, 0.21181154251098633, 0.2695402503013611, 0.8651300072669983, 0.8051849603652954, 0.7073134779930115, 0.5963589549064636, 0.09601861238479614, 0.5362404584884644, 0.23020631074905396, 0.8515381813049316, 0.5730932354927063, 0.7235705256462097, 0.08228331804275513, 0.5840849280357361, 0.6030013561248779, 0.2084050178527832, 0.7312950491905212, 0.6159517168998718, 0.6482340693473816, 0.07220339775085449, 0.5136227607727051, 0.9152160286903381, 0.8169018030166626, 0.15515869855880737, 0.7978536486625671, 0.564482569694519, 0.4757157564163208, 0.2718064785003662, 0.6910138726234436, 0.5675734877586365, 0.702862024307251, 0.19079893827438354, 0.8995556235313416, 0.4988499879837036, 0.6378234028816223, 0.2683940529823303, 0.21990180015563965, 0.8442690372467041, 0.8502047061920166, 0.9857811331748962, 0.3549607992172241, 0.7605474591255188]} - {"index": {}} + {"index": {"_id": "1"}} {"source_text": "the machine is leaking", "embedding":[0.09775793552398682, 0.9594467282295227, 0.7915146946907043, 0.9140479564666748, 0.5148435235023499, 0.8556410670280457, 0.6022665500640869, 0.05222177505493164, 0.9821935296058655, 0.49276530742645264, 0.23147249221801758, 0.2428399920463562, 0.3865380883216858, 0.5778483748435974, 0.5600519776344299, 0.9427472352981567, 0.48832541704177856, 0.8807493448257446, 0.32909590005874634, 0.8452557325363159, 0.811530590057373, 0.13344216346740723, 0.15256845951080322, 0.5025331974029541, 0.4288772940635681, 0.6590417623519897, 0.9282752871513367, 0.8842046856880188, 0.7873250842094421, 0.356731653213501, 0.9959152936935425, 0.07572609186172485, 0.5062583088874817, 0.36245888471603394, 0.6189196705818176, 0.7766605019569397, 0.5198523998260498, 0.7379586100578308, 0.0553441047668457, 0.5035901665687561, 0.24139636754989624, 0.10798943042755127, 0.272808313369751, 0.38171595335006714, 0.24275553226470947, 0.956981897354126, 0.8182021379470825, 0.9383817315101624, 0.06551980972290039, 0.6892690658569336, 
0.7068917751312256, 0.5184322595596313, 0.6103079319000244, 0.7020677328109741, 0.7181660532951355, 0.6477184295654297, 0.26282840967178345, 0.9316624402999878, 0.8318467140197754, 0.1487215757369995, 0.39937925338745117, 0.6842989921569824, 0.3496543765068054, 0.6008991003036499, 0.9530165791511536, 0.4209877848625183, 0.5675879716873169, 0.7883706092834473, 0.9547191858291626, 0.6292906403541565, 0.49566715955734253, 0.6907342672348022, 0.0834314227104187, 0.19785481691360474, 0.4896165728569031, 0.8460168838500977, 0.9680339097976685, 0.43386441469192505, 0.7068926095962524, 0.19123870134353638, 0.5661664009094238, 0.610595166683197, 0.23599380254745483, 0.2831611633300781, 0.7919651865959167, 0.0018386244773864746, 0.15559959411621094, 0.4622604250907898, 0.02038663625717163, 0.42241227626800537, 0.4200526475906372, 0.1223069429397583, 0.7035380005836487, 0.09902423620223999, 0.7804107666015625, 0.05339455604553223, 0.6485095024108887, 0.29347676038742065, 0.9716366529464722, 0.30257928371429443]} - {"index": {}} + {"index": {"_id": "2"}} {"source_text": "these are my words", "embedding":[0.7000167369842529, 0.590781033039093, 0.009879708290100098, 0.7874260544776917, 0.797156572341919, 0.1791083812713623, 0.07826781272888184, 0.25102007389068604, 0.09334254264831543, 0.3819708824157715, 0.7312374711036682, 0.02819347381591797, 0.20099765062332153, 0.7702597975730896, 0.9443559050559998, 0.35520339012145996, 0.25699591636657715, 0.5596823692321777, 0.23947590589523315, 0.47478222846984863, 0.23411548137664795, 0.9809996485710144, 0.3806597590446472, 0.5006771087646484, 0.5724453926086426, 0.21510547399520874, 0.07062828540802002, 0.9858258962631226, 0.9636645317077637, 0.36034029722213745, 0.07260054349899292, 0.06882566213607788, 0.18354403972625732, 0.06756395101547241, 0.5749042630195618, 0.05275309085845947, 0.1865217685699463, 0.5852730870246887, 0.1086682677268982, 0.10090464353561401, 0.32582908868789673, 0.5494027733802795, 0.873362123966217, 
0.02236837148666382, 0.37973177433013916, 0.5556552410125732, 0.5083678364753723, 0.8081125020980835, 0.09164196252822876, 0.2207810878753662, 0.8086426258087158, 0.271828293800354, 0.5981417298316956, 0.7745779156684875, 0.40872830152511597, 0.6035888195037842, 0.5598325133323669, 0.19086670875549316, 0.02406853437423706, 0.8299782872200012, 0.4994274377822876, 0.0300295352935791, 0.47190529108047485, 0.8889331817626953, 0.34195321798324585, 0.9380808472633362, 0.4418332576751709, 0.5789303779602051, 0.0526617169380188, 0.7349719405174255, 0.44571834802627563, 0.6602563261985779, 0.3819742202758789, 0.16881734132766724, 0.45588219165802, 0.028081774711608887, 0.6681976914405823, 0.8183007836341858, 0.7887755632400513, 0.4506028890609741, 0.8040162324905396, 0.431918203830719, 0.7408918738365173, 0.39756304025650024, 0.7438145875930786, 0.6120601892471313, 0.5724676251411438, 0.08701330423355103, 0.18344634771347046, 0.7226220369338989, 0.3648560643196106, 0.9813777208328247, 0.2615315318107605, 0.9847549796104431, 0.32967478036880493, 0.47099196910858154, 0.3591546416282654, 0.4132147431373596, 0.48631107807159424, 0.04420149326324463]} - {"index": {}} + {"index": {"_id": "3"}} {"source_text": "washing machine", "embedding":[0.7044712901115417, 0.12284207344055176, 0.5008929967880249, 0.04643195867538452, 0.3666788339614868, 0.26660799980163574, 0.24114298820495605, 0.0761682391166687, 0.5294214487075806, 0.16935181617736816, 0.6257967948913574, 0.2804388999938965, 0.6417903900146484, 0.169958233833313, 0.4216839075088501, 0.6773303747177124, 0.9472144842147827, 0.21874648332595825, 0.5095921754837036, 0.839306116104126, 0.6176233291625977, 0.5847064852714539, 0.6748610734939575, 0.3264034390449524, 0.4112023115158081, 0.13818275928497314, 0.08356589078903198, 0.4147903323173523, 0.5626787543296814, 0.7167286276817322, 0.6314535737037659, 0.23092854022979736, 0.34547603130340576, 0.7425565719604492, 0.2837678790092468, 0.47037917375564575, 0.1555209755897522, 
0.5618507266044617, 0.2076261043548584, 0.3026384711265564, 0.04561811685562134, 0.1691250205039978, 0.2504339814186096, 0.5350574851036072, 0.26857447624206543, 0.23607933521270752, 0.16938960552215576, 0.23708534240722656, 0.026302993297576904, 0.16901731491088867, 0.2847784757614136, 0.944273829460144, 0.28171658515930176, 0.9864799380302429, 0.6811433434486389, 0.9383156895637512, 0.5682582259178162, 0.14361613988876343, 0.7900274395942688, 0.27808505296707153, 0.05677521228790283, 0.08594226837158203, 0.6450491547584534, 0.06500720977783203, 0.36045730113983154, 0.1987738013267517, 0.07287931442260742, 0.5315744280815125, 0.04742676019668579, 0.7842378616333008, 0.0881078839302063, 0.7612627744674683, 0.2528950572013855, 0.27305954694747925, 0.03027820587158203, 0.4686838984489441, 0.13311690092086792, 0.048372089862823486, 0.808062732219696, 0.44010263681411743, 0.5726178288459778, 0.15828031301498413, 0.4597446322441101, 0.6375324130058289, 0.8452948927879333, 0.9763500690460205, 0.5094607472419739, 0.3535742163658142, 0.664739191532135, 0.40749913454055786, 0.8537857532501221, 0.5830079913139343, 0.7949922680854797, 0.6309236288070679, 0.07258343696594238, 0.1224660873413086, 0.24250483512878418, 0.36189037561416626, 0.5156043171882629, 0.1819135546684265]} - {"index": {}} + {"index": {"_id": "4"}} {"source_text": "washing machine smells", "embedding":[0.7249823808670044, 0.3981819152832031, 0.4572623372077942, 0.7442894577980042, 0.15898281335830688, 0.6481881737709045, 0.1513708233833313, 0.8945682644844055, 0.7708938121795654, 0.5494217276573181, 0.48253726959228516, 0.39402270317077637, 0.6369197368621826, 0.7152248024940491, 0.6326345205307007, 0.7362181544303894, 0.350342333316803, 0.16101288795471191, 0.4180338382720947, 0.04114532470703125, 0.002633512020111084, 0.20396709442138672, 0.8963556885719299, 0.1552276611328125, 0.7476853728294373, 0.9651047587394714, 0.7527561187744141, 0.7041972279548645, 0.12461084127426147, 0.6282403469085693, 
0.9631509184837341, 0.16590750217437744, 0.4101366400718689, 0.31320667266845703, 0.13579899072647095, 0.2895740270614624, 0.9905323386192322, 0.02118372917175293, 0.637545645236969, 0.5133231282234192, 0.679695188999176, 0.04641437530517578, 0.21913814544677734, 0.16534924507141113, 0.02987360954284668, 0.14805591106414795, 0.16874665021896362, 0.9378783702850342, 0.8607399463653564, 0.7287217974662781, 0.5402306318283081, 0.9973209500312805, 0.26169413328170776, 0.3835873603820801, 0.1874808669090271, 0.8038567304611206, 0.18557673692703247, 0.8631893992424011, 0.7676172256469727, 0.3599127531051636, 0.48698097467422485, 0.926689088344574, 0.6542723774909973, 0.49722349643707275, 0.7027173638343811, 0.13385021686553955, 0.9873734712600708, 0.17187494039535522, 0.7995050549507141, 0.5259199142456055, 0.33804380893707275, 0.21665722131729126, 0.952264130115509, 0.8337767720222473, 0.879487156867981, 0.5553549528121948, 0.6160674095153809, 0.1315295696258545, 0.8010737895965576, 0.834412693977356, 0.20340144634246826, 0.8993185758590698, 0.6493895649909973, 0.9454924464225769, 0.38529330492019653, 0.6891772150993347, 0.5530646443367004, 0.18555349111557007, 0.8361382484436035, 0.11815804243087769, 0.38942235708236694, 0.945141613483429, 0.6417409181594849, 0.39776402711868286, 0.5133314728736877, 0.5431299805641174, 0.2615429759025574, 0.8987119793891907, 0.023733675479888916, 0.4941052794456482]} - {"index": {}} + {"index": {"_id": "5"}} {"source_text": "my words", "embedding":[0.19087255001068115, 0.5498749017715454, 0.9536173939704895, 0.25011056661605835, 0.37642204761505127, 0.18271470069885254, 0.670674741268158, 0.5553990006446838, 0.3306507468223572, 0.3368762731552124, 0.053364574909210205, 0.047215282917022705, 0.4221981167793274, 0.7591024041175842, 0.998794436454773, 0.6113318204879761, 0.8178470730781555, 0.8554672598838806, 0.40100908279418945, 0.6486459374427795, 0.804382860660553, 0.6775466799736023, 0.2916865944862366, 0.7019925117492676, 
0.9812073707580566, 0.4414554834365845, 0.08203905820846558, 0.9167835116386414, 0.3082762360572815, 0.5454868674278259, 0.6665160655975342, 0.06828844547271729, 0.36014634370803833, 0.01810687780380249, 0.2640475630760193, 0.1856365203857422, 0.4734996557235718, 0.8153479695320129, 0.9614933133125305, 0.4851576089859009, 0.003343045711517334, 0.17352384328842163, 0.26423048973083496, 0.24217921495437622, 0.5694647431373596, 0.8538861274719238, 0.06464511156082153, 0.038984060287475586, 0.7695011496543884, 0.008188009262084961, 0.3858819007873535, 0.7950196862220764, 0.7225212454795837, 0.3982154130935669, 0.4996080994606018, 0.28709208965301514, 0.6753579378128052, 0.6779837608337402, 0.4815831184387207, 0.27917128801345825, 0.8400004506111145, 0.9022405743598938, 0.8253144025802612, 0.6251398324966431, 0.25444501638412476, 0.7694959044456482, 0.006821691989898682, 0.7958594560623169, 0.9144708514213562, 0.8688076138496399, 0.9641174077987671, 0.44437146186828613, 0.06135892868041992, 0.2638128399848938, 0.05436718463897705, 0.9926314353942871, 0.8661795854568481, 0.9176243543624878, 0.5521496534347534, 0.6017677783966064, 0.22096896171569824, 0.7030748128890991, 0.16923701763153076, 0.8178754448890686, 0.47008246183395386, 0.28875309228897095, 0.14314061403274536, 0.3431167006492615, 0.9301973581314087, 0.5416158437728882, 0.563427209854126, 0.7897542119026184, 0.2761036157608032, 0.16855067014694214, 0.42684781551361084, 0.7562968730926514, 0.2551668882369995, 0.7754542827606201, 0.218039870262146, 0.7080662846565247]} - do: headers: @@ -106,23 +106,22 @@ setup: --- "Test vector search with query_vector_builder": - skip: - version: all - reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/106650 -# version: " - 8.13.99" -# reason: "introduced after 8.13" + version: " - 8.13.99" + reason: "introduced after 8.13" - do: search: index: index-with-embedded-text body: + size: 3 query: knn: field: embedding - num_candidates: 3 query_vector_builder: 
text_embedding: model_id: text_embedding_model model_text: "the octopus comforter smells" - - match: { hits.total.value: 3 } + - length: { hits.hits: 3 } + - match: { hits.hits.0._id: "0" } --- "nested kNN search with inner_hits size": From 12441f505ac108230e5e06a94d29e1f89278f7b9 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 25 Mar 2024 15:47:54 +0100 Subject: [PATCH 156/214] Reduce InternalComposite in a streaming fashion (#106566) Use a priority queue with a hashmap to keep track of the competitive buckets. We still delayed the merging of the child aggregations by introducing a DelayedMultiBucketAggregatorsReducer. --- .../DelayedMultiBucketAggregatorsReducer.java | 73 ++++++++ .../bucket/MultiBucketAggregatorsReducer.java | 2 +- .../bucket/composite/InternalComposite.java | 157 ++++++++++-------- 3 files changed, 159 insertions(+), 73 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java new file mode 100644 index 0000000000000..7fc7c96badaaa --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.AggregatorsReducer; +import org.elasticsearch.search.aggregations.InternalAggregations; + +import java.util.ArrayList; +import java.util.List; + +/** + * Class for reducing a list of {@link MultiBucketsAggregation.Bucket} to a single + * {@link InternalAggregations} and the number of documents in a delayable fashion. + * + * This class can be reused by calling {@link #reset()}. + * + * @see MultiBucketAggregatorsReducer + */ +public final class DelayedMultiBucketAggregatorsReducer { + + private final AggregationReduceContext context; + // the maximum size of this array is the number of shards to be reduced. We currently do it in a batches of 256 + // if we expect bigger batches, we might consider to use ObjectArray. + private final List internalAggregations; + private long count = 0; + + public DelayedMultiBucketAggregatorsReducer(AggregationReduceContext context) { + this.context = context; + this.internalAggregations = new ArrayList<>(); + } + + /** + * Adds a {@link MultiBucketsAggregation.Bucket} for reduction. + */ + public void accept(MultiBucketsAggregation.Bucket bucket) { + count += bucket.getDocCount(); + internalAggregations.add(bucket.getAggregations()); + } + + /** + * Reset the content of this reducer. + */ + public void reset() { + count = 0L; + internalAggregations.clear(); + } + + /** + * returns the reduced {@link InternalAggregations}. 
+ */ + public InternalAggregations get() { + try (AggregatorsReducer aggregatorsReducer = new AggregatorsReducer(context, internalAggregations.size())) { + for (InternalAggregations agg : internalAggregations) { + aggregatorsReducer.accept(agg); + } + return aggregatorsReducer.get(); + } + } + + /** + * returns the number of docs + */ + public long getDocCount() { + return count; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java index 176ca2f918fff..e7d0e6a17e4c6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java @@ -21,7 +21,7 @@ public final class MultiBucketAggregatorsReducer implements Releasable { private final AggregatorsReducer aggregatorsReducer; - long count = 0; + private long count = 0; public MultiBucketAggregatorsReducer(AggregationReduceContext context, int size) { this.aggregatorsReducer = new AggregatorsReducer(context, size); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 31cd5c9426755..fc13dcb6a22ee 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; +import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; @@ -20,6 +22,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.DelayedMultiBucketAggregatorsReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -201,56 +204,29 @@ int[] getReverseMuls() { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final ObjectArrayPriorityQueue pq = new ObjectArrayPriorityQueue<>(size, reduceContext.bigArrays()) { - @Override - protected boolean lessThan(BucketIterator a, BucketIterator b) { - return a.compareTo(b) < 0; - } - }; + final BucketsQueue queue = new BucketsQueue(reduceContext); boolean earlyTerminated = false; @Override public void accept(InternalAggregation aggregation) { InternalComposite sortedAgg = (InternalComposite) aggregation; earlyTerminated |= sortedAgg.earlyTerminated; - BucketIterator it = new BucketIterator(sortedAgg.buckets); - if (it.next() != null) { - pq.add(it); + for (InternalBucket bucket : sortedAgg.getBuckets()) { + if (queue.add(bucket) == false) { + // if the bucket is not competitive, we can break + // because incoming buckets are sorted + break; + } } } @Override public InternalAggregation get() { - InternalBucket lastBucket = null; - List buckets = new ArrayList<>(); - List result = new ArrayList<>(); - while (pq.size() > 0) { - BucketIterator bucketIt = pq.top(); - if (lastBucket != null && bucketIt.current.compareKey(lastBucket) != 0) { - InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); - buckets.clear(); - result.add(reduceBucket); - if (result.size() 
>= getSize()) { - break; - } - } - lastBucket = bucketIt.current; - buckets.add(bucketIt.current); - if (bucketIt.next() != null) { - pq.updateTop(); - } else { - pq.pop(); - } - } - if (buckets.size() > 0) { - InternalBucket reduceBucket = reduceBucket(buckets, reduceContext); - result.add(reduceBucket); - } - + final List result = queue.get(); List reducedFormats = formats; CompositeKey lastKey = null; - if (result.size() > 0) { - lastBucket = result.get(result.size() - 1); + if (result.isEmpty() == false) { + InternalBucket lastBucket = result.get(result.size() - 1); /* Attach the formats from the last bucket to the reduced composite * so that we can properly format the after key. */ reducedFormats = lastBucket.formats; @@ -275,11 +251,82 @@ public InternalAggregation get() { @Override public void close() { - Releasables.close(pq); + Releasables.close(queue); } }; } + private class BucketsQueue implements Releasable { + private final ObjectObjectPagedHashMap bucketReducers; + private final ObjectArrayPriorityQueue queue; + private final AggregationReduceContext reduceContext; + + private BucketsQueue(AggregationReduceContext reduceContext) { + this.reduceContext = reduceContext; + bucketReducers = new ObjectObjectPagedHashMap<>(getSize(), reduceContext.bigArrays()); + queue = new ObjectArrayPriorityQueue<>(getSize(), reduceContext.bigArrays()) { + @Override + protected boolean lessThan(InternalBucket a, InternalBucket b) { + return b.compareKey(a) < 0; + } + }; + } + + /** adds a bucket to the queue. 
Return false if the bucket is not competitive, otherwise true.*/ + boolean add(InternalBucket bucket) { + DelayedMultiBucketAggregatorsReducer delayed = bucketReducers.get(bucket.key); + if (delayed == null) { + final InternalBucket out = queue.insertWithOverflow(bucket); + if (out == null) { + // bucket is added + delayed = new DelayedMultiBucketAggregatorsReducer(reduceContext); + } else if (out == bucket) { + // bucket is not competitive + return false; + } else { + // bucket replaces existing bucket + delayed = bucketReducers.remove(out.key); + assert delayed != null; + delayed.reset(); + } + bucketReducers.put(bucket.key, delayed); + } + delayed.accept(bucket); + return true; + } + + /** Return the list of reduced buckets */ + List get() { + final int bucketsSize = (int) bucketReducers.size(); + final InternalBucket[] result = new InternalBucket[bucketsSize]; + for (int i = bucketsSize - 1; i >= 0; i--) { + final InternalBucket bucket = queue.pop(); + assert bucket != null; + /* Use the formats from the bucket because they'll be right to format + * the key. The formats on the InternalComposite doing the reducing are + * just whatever formats make sense for *its* index. This can be real + * trouble when the index doing the reducing is unmapped. 
*/ + final var reducedFormats = bucket.formats; + final DelayedMultiBucketAggregatorsReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); + result[i] = new InternalBucket( + sourceNames, + reducedFormats, + bucket.key, + reverseMuls, + missingOrders, + reducer.getDocCount(), + reducer.get() + ); + } + return List.of(result); + } + + @Override + public void close() { + Releasables.close(bucketReducers, queue); + } + } + @Override public InternalAggregation finalizeSampling(SamplingContext samplingContext) { return new InternalComposite( @@ -296,22 +343,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { ); } - private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.isEmpty() == false; - long docCount = 0; - for (InternalBucket bucket : buckets) { - docCount += bucket.docCount; - } - final List aggregations = new BucketAggregationList<>(buckets); - final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - /* Use the formats from the bucket because they'll be right to format - * the key. The formats on the InternalComposite doing the reducing are - * just whatever formats make sense for *its* index. This can be real - * trouble when the index doing the reducing is unmapped. 
*/ - final var reducedFormats = buckets.get(0).formats; - return new InternalBucket(sourceNames, reducedFormats, buckets.get(0).key, reverseMuls, missingOrders, docCount, aggs); - } - @Override public boolean equals(Object obj) { if (this == obj) return true; @@ -331,24 +362,6 @@ public int hashCode() { return Objects.hash(super.hashCode(), size, buckets, afterKey, Arrays.hashCode(reverseMuls), Arrays.hashCode(missingOrders)); } - private static class BucketIterator implements Comparable { - final Iterator it; - InternalBucket current; - - private BucketIterator(List buckets) { - this.it = buckets.iterator(); - } - - @Override - public int compareTo(BucketIterator other) { - return current.compareKey(other.current); - } - - InternalBucket next() { - return current = it.hasNext() ? it.next() : null; - } - } - public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements CompositeAggregation.Bucket, From a85599f125292e28aa966433a47dedae8edb9ef5 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Mar 2024 09:04:29 -0700 Subject: [PATCH 157/214] Fix missing docs in time series source reader (#106705) While working on rate aggregation, I noticed that the time series source might miss documents. Specifically, when we reach the max_page_size limit and the tsid changes, we skip that document because we call nextDoc again when resuming reading. Also, I think this operator should honor the max_page_size limit and avoid emitting pages that exceed this threshold. 
--- ...TimeSeriesSortedSourceOperatorFactory.java | 45 ++-- .../TimeSeriesSortedSourceOperatorTests.java | 201 +++++++----------- 2 files changed, 92 insertions(+), 154 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index b1211c8ea5ff4..f9df90da6aa2d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -228,15 +228,10 @@ protected boolean lessThan(Leaf a, Leaf b) { void consume() throws IOException { if (queue != null) { currentTsid = BytesRef.deepCopyOf(queue.top().timeSeriesHash); - boolean breakOnNextTsidChange = false; while (queue.size() > 0) { - if (remainingDocs <= 0) { + if (remainingDocs <= 0 || currentPagePos >= maxPageSize) { break; } - if (currentPagePos > maxPageSize) { - breakOnNextTsidChange = true; - } - currentPagePos++; remainingDocs--; Leaf leaf = queue.top(); @@ -244,46 +239,32 @@ void consume() throws IOException { docsBuilder.appendInt(leaf.iterator.docID()); timestampIntervalBuilder.appendLong(leaf.timestamp); tsOrdBuilder.appendInt(globalTsidOrd); + final Leaf newTop; if (leaf.nextDoc()) { // TODO: updating the top is one of the most expensive parts of this operation. // Ideally we would do this a less as possible. Maybe the top can be updated every N docs? - Leaf newTop = queue.updateTop(); - if (newTop.timeSeriesHash.equals(currentTsid) == false) { - globalTsidOrd++; - currentTsid = BytesRef.deepCopyOf(newTop.timeSeriesHash); - if (breakOnNextTsidChange) { - break; - } - } + newTop = queue.updateTop(); } else { queue.pop(); + newTop = queue.size() > 0 ? 
queue.top() : null; + } + if (newTop != null && newTop.timeSeriesHash.equals(currentTsid) == false) { + globalTsidOrd++; + currentTsid = BytesRef.deepCopyOf(newTop.timeSeriesHash); } } } else { - int previousTsidOrd = leaf.timeSeriesHashOrd; - boolean breakOnNextTsidChange = false; // Only one segment, so no need to use priority queue and use segment ordinals as tsid ord. while (leaf.nextDoc()) { - if (remainingDocs <= 0) { - break; - } - if (currentPagePos > maxPageSize) { - breakOnNextTsidChange = true; - } - if (breakOnNextTsidChange) { - if (previousTsidOrd != leaf.timeSeriesHashOrd) { - break; - } - } - - currentPagePos++; - remainingDocs--; - tsOrdBuilder.appendInt(leaf.timeSeriesHashOrd); timestampIntervalBuilder.appendLong(leaf.timestamp); // Don't append segment ord, because there is only one segment. docsBuilder.appendInt(leaf.iterator.docID()); - previousTsidOrd = leaf.timeSeriesHashOrd; + currentPagePos++; + remainingDocs--; + if (remainingDocs <= 0 || currentPagePos >= maxPageSize) { + break; + } } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 3b47597d6ea2f..9a5150bdf4fff 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Randomness; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; @@ -33,6 +34,7 @@ import org.elasticsearch.compute.data.Page; import 
org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; @@ -48,15 +50,15 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.function.Function; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; public class TimeSeriesSortedSourceOperatorTests extends AnyOperatorTestCase { @@ -73,81 +75,28 @@ public void testSimple() { int numTimeSeries = 3; int numSamplesPerTS = 10; long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(1024, 1024, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); - assertThat(results, hasSize(1)); - Page page = results.get(0); - assertThat(page.getBlockCount(), equalTo(5)); - - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - 
assertThat(voltageVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); - assertThat(hostnameVector.getPositionCount(), equalTo(numTimeSeries * numSamplesPerTS)); - + int maxPageSize = between(1, 1024); + List results = runDriver(1024, maxPageSize, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); + // for now we emit at most one time series each page int offset = 0; - for (int expectedTsidOrd = 0; expectedTsidOrd < numTimeSeries; expectedTsidOrd++) { - String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); - long expectedVoltage = 5L + expectedTsidOrd; - for (int j = 0; j < numSamplesPerTS; j++) { - long expectedTimestamp = timestampStart + ((numSamplesPerTS - j - 1) * 10_000L); - - assertThat(docVector.shards().getInt(offset), equalTo(0)); - assertThat(voltageVector.getLong(offset), equalTo(expectedVoltage)); - assertThat(hostnameVector.getBytesRef(offset, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); - assertThat(tsidVector.getInt(offset), equalTo(expectedTsidOrd)); - assertThat(timestampVector.getLong(offset), equalTo(expectedTimestamp)); - offset++; - } - } - } - - public void testMaxPageSize() { - int numTimeSeries = 3; - int numSamplesPerTS = 10; - long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(1024, 1, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); - // A time series shouldn't be split over multiple pages. 
- assertThat(results, hasSize(numTimeSeries)); - for (int i = 0; i < numTimeSeries; i++) { - Page page = results.get(i); + for (Page page : results) { assertThat(page.getBlockCount(), equalTo(5)); - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numSamplesPerTS)); - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numSamplesPerTS)); - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numSamplesPerTS)); - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - assertThat(voltageVector.getPositionCount(), equalTo(numSamplesPerTS)); - BytesRefVector hostnameVector = (BytesRefVector) page.getBlock(4).asVector(); - assertThat(hostnameVector.getPositionCount(), equalTo(numSamplesPerTS)); - - int offset = 0; - int expectedTsidOrd = i; - String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); - long expectedVoltage = 5L + expectedTsidOrd; - for (int j = 0; j < numSamplesPerTS; j++) { - long expectedTimestamp = timestampStart + ((numSamplesPerTS - j - 1) * 10_000L); - - assertThat(docVector.shards().getInt(offset), equalTo(0)); - assertThat(voltageVector.getLong(offset), equalTo(expectedVoltage)); - assertThat(hostnameVector.getBytesRef(offset, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); - assertThat(tsidVector.getInt(offset), equalTo(expectedTsidOrd)); - assertThat(timestampVector.getLong(offset), equalTo(expectedTimestamp)); + for (int i = 0; i < page.getPositionCount(); i++) { + int expectedTsidOrd = offset / numSamplesPerTS; + String expectedHostname = String.format(Locale.ROOT, "host-%02d", expectedTsidOrd); + long expectedVoltage = 5L + expectedTsidOrd; + int sampleIndex = offset - expectedTsidOrd * numSamplesPerTS; + long expectedTimestamp = timestampStart + ((numSamplesPerTS - sampleIndex - 1) * 
10_000L); + assertThat(docVector.shards().getInt(i), equalTo(0)); + assertThat(voltageVector.getLong(i), equalTo(expectedVoltage)); + assertThat(hostnameVector.getBytesRef(i, new BytesRef()).utf8ToString(), equalTo(expectedHostname)); + assertThat(tsidVector.getInt(i), equalTo(expectedTsidOrd)); + assertThat(timestampVector.getLong(i), equalTo(expectedTimestamp)); offset++; } } @@ -158,7 +107,7 @@ public void testLimit() { int numSamplesPerTS = 10; int limit = 1; long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - List results = runDriver(limit, 1024, randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); + List results = runDriver(limit, randomIntBetween(1, 1024), randomBoolean(), numTimeSeries, numSamplesPerTS, timestampStart); assertThat(results, hasSize(1)); Page page = results.get(0); assertThat(page.getBlockCount(), equalTo(5)); @@ -186,57 +135,67 @@ public void testLimit() { } public void testRandom() { - int numDocs = 1024; - var ctx = driverContext(); - long timestampStart = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - var timeSeriesFactory = createTimeSeriesSourceOperator(Integer.MAX_VALUE, Integer.MAX_VALUE, randomBoolean(), writer -> { - int commitEvery = 64; - long timestamp = timestampStart; - for (int i = 0; i < numDocs; i++) { - String hostname = String.format(Locale.ROOT, "host-%02d", i % 20); - int voltage = i % 5; - writeTS(writer, timestamp, new Object[] { "hostname", hostname }, new Object[] { "voltage", voltage }); - if (i % commitEvery == 0) { - writer.commit(); - } - timestamp += 10_000; + record Doc(int host, long timestamp, long metric) {} + int numDocs = between(1, 1000); + List docs = new ArrayList<>(); + Map timestamps = new HashMap<>(); + long t0 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numDocs; i++) { + int tsid = randomIntBetween(0, 9); + long timestamp = 
timestamps.compute(tsid, (k, curr) -> { + long t = curr != null ? curr : t0; + return t + randomIntBetween(1, 5000); + }); + docs.add(new Doc(tsid, timestamp, randomIntBetween(1, 10000))); + } + int maxPageSize = between(1, 1024); + int limit = randomBoolean() ? between(1, 100000) : Integer.MAX_VALUE; + var timeSeriesFactory = createTimeSeriesSourceOperator(limit, maxPageSize, randomBoolean(), writer -> { + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, new Object[] { "hostname", "h" + doc.host }, new Object[] { "metric", doc.metric }); } - return numDocs; + return docs.size(); }); + DriverContext driverContext = driverContext(); List results = new ArrayList<>(); - - var voltageField = new NumberFieldMapper.NumberFieldType("voltage", NumberFieldMapper.NumberType.LONG); + var metricField = new NumberFieldMapper.NumberFieldType("metric", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( - ctx, - timeSeriesFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, voltageField, ElementType.LONG).get(ctx)), + driverContext, + timeSeriesFactory.get(driverContext), + List.of(ValuesSourceReaderOperatorTests.factory(reader, metricField, ElementType.LONG).get(driverContext)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - OperatorTestCase.assertDriverContext(ctx); - assertThat(results, hasSize(1)); - Page page = results.get(0); - assertThat(page.getBlockCount(), equalTo(4)); - - DocVector docVector = (DocVector) page.getBlock(0).asVector(); - assertThat(docVector.getPositionCount(), equalTo(numDocs)); - - IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); - assertThat(tsidVector.getPositionCount(), equalTo(numDocs)); - - LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); - assertThat(timestampVector.getPositionCount(), equalTo(numDocs)); - - LongVector voltageVector = (LongVector) page.getBlock(3).asVector(); - 
assertThat(voltageVector.getPositionCount(), equalTo(numDocs)); - for (int i = 0; i < page.getBlockCount(); i++) { - assertThat(docVector.shards().getInt(0), equalTo(0)); - assertThat(voltageVector.getLong(i), either(greaterThanOrEqualTo(0L)).or(lessThanOrEqualTo(4L))); - assertThat(tsidVector.getInt(i), either(greaterThanOrEqualTo(0)).or(lessThan(20))); - assertThat(timestampVector.getLong(i), greaterThanOrEqualTo(timestampStart)); + docs.sort(Comparator.comparing(Doc::host).thenComparing(Comparator.comparingLong(Doc::timestamp).reversed())); + Map hostToTsidOrd = new HashMap<>(); + timestamps.keySet().stream().sorted().forEach(n -> hostToTsidOrd.put(n, hostToTsidOrd.size())); + int offset = 0; + for (int p = 0; p < results.size(); p++) { + Page page = results.get(p); + if (p < results.size() - 1) { + assertThat(page.getPositionCount(), equalTo(maxPageSize)); + } else { + assertThat(page.getPositionCount(), lessThanOrEqualTo(limit)); + assertThat(page.getPositionCount(), lessThanOrEqualTo(maxPageSize)); + } + assertThat(page.getBlockCount(), equalTo(4)); + DocVector docVector = (DocVector) page.getBlock(0).asVector(); + IntVector tsidVector = (IntVector) page.getBlock(1).asVector(); + LongVector timestampVector = (LongVector) page.getBlock(2).asVector(); + LongVector metricVector = (LongVector) page.getBlock(3).asVector(); + for (int i = 0; i < page.getPositionCount(); i++) { + Doc doc = docs.get(offset); + offset++; + assertThat(docVector.shards().getInt(0), equalTo(0)); + assertThat(tsidVector.getInt(i), equalTo(hostToTsidOrd.get(doc.host))); + assertThat(timestampVector.getLong(i), equalTo(doc.timestamp)); + assertThat(metricVector.getLong(i), equalTo(doc.metric)); + } } + assertThat(offset, equalTo(Math.min(limit, numDocs))); } @Override @@ -289,6 +248,10 @@ List runDriver(int limit, int maxPageSize, boolean forceMerge, int numTime ) ); OperatorTestCase.assertDriverContext(ctx); + for (Page result : results) { + assertThat(result.getPositionCount(), 
lessThanOrEqualTo(maxPageSize)); + assertThat(result.getPositionCount(), lessThanOrEqualTo(limit)); + } return results; } @@ -298,7 +261,6 @@ TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( boolean forceMerge, CheckedFunction indexingLogic ) { - int numDocs; Sort sort = new Sort( new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING, false), new SortedNumericSortField(DataStreamTimestampFieldMapper.DEFAULT_PATH, SortField.Type.LONG, true) @@ -311,23 +273,18 @@ TimeSeriesSortedSourceOperatorFactory createTimeSeriesSourceOperator( ) ) { - numDocs = indexingLogic.apply(writer); + int numDocs = indexingLogic.apply(writer); if (forceMerge) { writer.forceMerge(1); } reader = writer.getReader(); + assertThat(reader.numDocs(), equalTo(numDocs)); } catch (IOException e) { throw new UncheckedIOException(e); } var ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0); Function queryFunction = c -> new MatchAllDocsQuery(); - return TimeSeriesSortedSourceOperatorFactory.create( - Math.min(numDocs, limit), - Math.min(numDocs, maxPageSize), - 1, - List.of(ctx), - queryFunction - ); + return TimeSeriesSortedSourceOperatorFactory.create(limit, maxPageSize, 1, List.of(ctx), queryFunction); } static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { From 2196576aed458889c9fd6c0c04aed891cec3a8d9 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Mar 2024 10:26:45 -0700 Subject: [PATCH 158/214] Use confined arena for CloseableByteBuffer (#106723) The jdk implementation of CloseableByteBuffer currently uses a shared arena. The assumption was that a buffer might be shared across threads. However, in practice for compression/decompression that is not true, and the shared arena has a noticeable impact on deallocation when the buffer is closed. This commit switches to a confined arena, limiting buffer creation and compress/decompress calls to a single thread.
relates #103374 --- .../elasticsearch/nativeaccess/CloseableByteBuffer.java | 9 +++++++++ .../org/elasticsearch/nativeaccess/NativeAccess.java | 6 ++++++ .../nativeaccess/jdk/JdkCloseableByteBuffer.java | 2 +- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java index aa5d94080afa9..6590aff307cc6 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/CloseableByteBuffer.java @@ -10,7 +10,16 @@ import java.nio.ByteBuffer; +/** + * A wrapper around a native {@link ByteBuffer} which allows that buffer to be + * closed synchronously. This is in contrast to JDK created native buffers + * which are deallocated only after GC has cleaned up references to + * the buffer. + */ public interface CloseableByteBuffer extends AutoCloseable { + /** + * Returns the wrapped {@link ByteBuffer}. + */ ByteBuffer buffer(); @Override diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java index 5b2be93dadc1f..f7019a4fd7a96 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/NativeAccess.java @@ -35,5 +35,11 @@ static NativeAccess instance() { */ Zstd getZstd(); + /** + * Creates a new {@link CloseableByteBuffer}. The buffer must be used within the same thread + * that it is created. 
+ * @param len the number of bytes the buffer should allocate + * @return the buffer + */ CloseableByteBuffer newBuffer(int len); } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java index d802fd8be7a67..97e6bf2f5580a 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java @@ -18,7 +18,7 @@ class JdkCloseableByteBuffer implements CloseableByteBuffer { private final ByteBuffer bufferView; JdkCloseableByteBuffer(int len) { - this.arena = Arena.ofShared(); + this.arena = Arena.ofConfined(); this.bufferView = this.arena.allocate(len).asByteBuffer(); } From 092f95e06e7b6ad123dc45f3ae31e7a46703a010 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 25 Mar 2024 18:25:24 +0000 Subject: [PATCH 159/214] [ML] Always update tokenisation options for chunked inference (#106718) Fixes an issue where if the defaults were used the input was truncated --- .../ElasticsearchInternalService.java | 4 +- .../services/elser/ElserInternalService.java | 7 ++- .../ElasticsearchInternalServiceTests.java | 60 ++++++++++++++++++ .../elser/ElserInternalServiceTests.java | 61 +++++++++++++++++++ 4 files changed, 127 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index a07ebe56a9258..02090ee84e708 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -251,9 +251,9 @@ public void chunkedInfer( return; } - var configUpdate = chunkingOptions.settingsArePresent() + var configUpdate = chunkingOptions != null ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : TextEmbeddingConfigUpdate.EMPTY_INSTANCE; + : new TokenizationConfigUpdate(null, null); var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index 5069724697818..bb88193612ff4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -15,6 +15,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; @@ -288,7 +289,7 @@ public void chunkedInfer( List input, Map taskSettings, InputType inputType, - ChunkingOptions chunkingOptions, + @Nullable ChunkingOptions chunkingOptions, ActionListener> listener ) { try { @@ -298,9 +299,9 @@ public void chunkedInfer( return; } - var configUpdate = chunkingOptions.settingsArePresent() + var configUpdate = chunkingOptions != null ? 
new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : TextExpansionConfigUpdate.EMPTY_UPDATE; + : new TokenizationConfigUpdate(null, null); var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 0757012b234bd..073712beb8050 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.util.ArrayList; @@ -38,6 +39,8 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -410,6 +413,63 @@ public void testChunkInfer() { assertTrue("Listener not called", gotResults.get()); } + @SuppressWarnings("unchecked") + public void testChunkInferSetsTokenization() { + var expectedSpan = new AtomicInteger(); + var expectedWindowSize = new AtomicReference(); + + Client client = 
mock(Client.class); + ThreadPool threadpool = new TestThreadPool("test"); + try { + when(client.threadPool()).thenReturn(threadpool); + doAnswer(invocationOnMock -> { + var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; + assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); + var update = (TokenizationConfigUpdate) request.getUpdate(); + assertEquals(update.getSpanSettings().span(), expectedSpan.get()); + assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); + return null; + }).when(client) + .execute( + same(InferTrainedModelDeploymentAction.INSTANCE), + any(InferTrainedModelDeploymentAction.Request.class), + any(ActionListener.class) + ); + + var model = new MultilingualE5SmallModel( + "foo", + TaskType.TEXT_EMBEDDING, + "e5", + new MultilingualE5SmallInternalServiceSettings(1, 1, "cross-platform") + ); + var service = createService(client); + + expectedSpan.set(-1); + expectedWindowSize.set(null); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + null, + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + + expectedSpan.set(-1); + expectedWindowSize.set(256); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(256, null), + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + } finally { + terminate(threadpool); + } + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElasticsearchInternalService(context); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java index 
f2fd195ab8c5a..dbb50260edaf1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import java.util.ArrayList; import java.util.Collections; @@ -35,6 +36,8 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -394,6 +397,64 @@ public void testChunkInfer() { assertTrue("Listener not called", gotResults.get()); } + @SuppressWarnings("unchecked") + public void testChunkInferSetsTokenization() { + var expectedSpan = new AtomicInteger(); + var expectedWindowSize = new AtomicReference(); + + ThreadPool threadpool = new TestThreadPool("test"); + Client client = mock(Client.class); + try { + when(client.threadPool()).thenReturn(threadpool); + doAnswer(invocationOnMock -> { + var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; + assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); + var update = (TokenizationConfigUpdate) request.getUpdate(); + assertEquals(update.getSpanSettings().span(), expectedSpan.get()); + assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); + return null; + }).when(client) + .execute( + same(InferTrainedModelDeploymentAction.INSTANCE), + 
any(InferTrainedModelDeploymentAction.Request.class), + any(ActionListener.class) + ); + + var model = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + "elser", + new ElserInternalServiceSettings(1, 1, "elser"), + new ElserMlNodeTaskSettings() + ); + var service = createService(client); + + expectedSpan.set(-1); + expectedWindowSize.set(null); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + null, + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + + expectedSpan.set(-1); + expectedWindowSize.set(256); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(256, null), + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + } finally { + terminate(threadpool); + } + } + private ElserInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElserInternalService(context); From 78115fbc9e9049ec99df9595eb2bb53db1365833 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Mon, 25 Mar 2024 14:27:43 -0400 Subject: [PATCH 160/214] [ESQL] Migrate BooleanFunctionEqualsElimination optimization (#106692) Relates to #105217 This copies the BooleanFunctionEqualsElimination logical optimization into ESQL, following the pattern established in #106499. I've copied the optimization rule into the ESQL version of OptimizerRules, and the tests into OptimizerRulesTests, and changed the imports &c to point to the appropriate ESQL classes instead of their QL counterparts. I only saw two tests for this one.
--- .../esql/optimizer/LogicalPlanOptimizer.java | 3 +- .../xpack/esql/optimizer/OptimizerRules.java | 32 ++++++++++++++++ .../esql/optimizer/OptimizerRulesTests.java | 38 ++++++++++++++++++- 3 files changed, 69 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index af8ad7a1fc435..93505fa4f20fc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -46,7 +46,6 @@ import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules; -import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.ConstantFolding; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.LiteralsOnTheRight; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PruneLiteralsInOrderBy; @@ -127,7 +126,7 @@ protected static Batch operators() { // needs to occur before BinaryComparison combinations (see class) new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.PropagateEquals(), new PropagateNullable(), - new BooleanFunctionEqualsElimination(), + new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.BooleanFunctionEqualsElimination(), new org.elasticsearch.xpack.esql.optimizer.OptimizerRules.CombineDisjunctionsToIn(), new SimplifyComparisonsArithmetics(EsqlDataTypes::areCompatible), // prune/elimination diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java index 
3ae662580a200..38ac596135abb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRules.java @@ -41,10 +41,12 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.expression.function.Function; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.expression.predicate.Range; import org.elasticsearch.xpack.ql.expression.predicate.logical.And; import org.elasticsearch.xpack.ql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.plan.QueryPlan; @@ -65,6 +67,7 @@ import java.util.Set; import static org.elasticsearch.xpack.ql.common.Failure.fail; +import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.combineOr; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitOr; @@ -257,6 +260,35 @@ protected Expression rule(Or or) { } } + /** + * This rule must always be placed after {@link org.elasticsearch.xpack.ql.optimizer.OptimizerRules.LiteralsOnTheRight}, since it looks + * at TRUE/FALSE literals' existence on the right hand-side of the {@link Equals}/{@link NotEquals} expressions. 
+ */ + public static final class BooleanFunctionEqualsElimination extends + org.elasticsearch.xpack.ql.optimizer.OptimizerRules.OptimizerExpressionRule { + + BooleanFunctionEqualsElimination() { + super(org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP); + } + + @Override + protected Expression rule(BinaryComparison bc) { + if ((bc instanceof Equals || bc instanceof NotEquals) && bc.left() instanceof Function) { + // for expression "==" or "!=" TRUE/FALSE, return the expression itself or its negated variant + + // TODO: Replace use of QL Not with ESQL Not + if (TRUE.equals(bc.right())) { + return bc instanceof Equals ? bc.left() : new Not(bc.left().source(), bc.left()); + } + if (FALSE.equals(bc.right())) { + return bc instanceof Equals ? new Not(bc.left().source(), bc.left()) : bc.left(); + } + } + + return bc; + } + } + /** * Propagate Equals to eliminate conjuncted Ranges or BinaryComparisons. * When encountering a different Equals, non-containing {@link Range} or {@link BinaryComparison}, the conjunction becomes false. 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 1aac8efbe6f65..01fcd222a5141 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -23,7 +23,9 @@ import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.expression.predicate.Range; import org.elasticsearch.xpack.ql.expression.predicate.logical.And; +import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.tree.Source; @@ -32,11 +34,10 @@ import java.util.List; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.ql.TestUtils.equalsOf; -import static org.elasticsearch.xpack.ql.TestUtils.nullEqualsOf; import static org.elasticsearch.xpack.ql.TestUtils.rangeOf; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; +import static org.elasticsearch.xpack.ql.expression.Literal.NULL; import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; import static org.hamcrest.Matchers.contains; @@ -182,6 +183,39 @@ public void testOrWithNonCombinableExpressions() { assertThat(in.list(), contains(ONE, THREE)); } + // Test BooleanFunctionEqualsElimination + public void testBoolEqualsSimplificationOnExpressions() { + OptimizerRules.BooleanFunctionEqualsElimination s = new 
OptimizerRules.BooleanFunctionEqualsElimination(); + Expression exp = new GreaterThan(EMPTY, getFieldAttribute(), new Literal(EMPTY, 0, DataTypes.INTEGER), null); + + assertEquals(exp, s.rule(new Equals(EMPTY, exp, TRUE))); + // TODO: Replace use of QL Not with ESQL Not + assertEquals(new Not(EMPTY, exp), s.rule(new Equals(EMPTY, exp, FALSE))); + } + + public void testBoolEqualsSimplificationOnFields() { + OptimizerRules.BooleanFunctionEqualsElimination s = new OptimizerRules.BooleanFunctionEqualsElimination(); + + FieldAttribute field = getFieldAttribute(); + + List comparisons = asList( + new Equals(EMPTY, field, TRUE), + new Equals(EMPTY, field, FALSE), + notEqualsOf(field, TRUE), + notEqualsOf(field, FALSE), + new Equals(EMPTY, NULL, TRUE), + new Equals(EMPTY, NULL, FALSE), + notEqualsOf(NULL, TRUE), + notEqualsOf(NULL, FALSE) + ); + + for (BinaryComparison comparison : comparisons) { + assertEquals(comparison, s.rule(comparison)); + } + } + + // Test Propagate Equals + // a == 1 AND a == 2 -> FALSE public void testDualEqualsConjunction() { FieldAttribute fa = getFieldAttribute(); From 96230f7a7d65c7dca9d86e6e65f3f961bc59cff1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 25 Mar 2024 11:30:27 -0700 Subject: [PATCH 161/214] Use CloseableByteBuffer in compress/decompress signatures (#106724) CloseableByteBuffer is backed by native memory segments, but the interfaces for compress and decompress methods of zstd take ByteBuffer. Although both Jna and the Jdk can deal with turning the native ByteBuffer back into an address to pass to the native method, the jdk may have a more significant cost to that action. This commit changes the signature of compress and decompress to take in CloseableByteBuffer so that each implementation can do its own unwrapping to get the appropriate native address. 
relates #103374 --- .../jna/JnaCloseableByteBuffer.java | 2 +- .../nativeaccess/jna/JnaZstdLibrary.java | 35 ++++++++++++++----- .../org/elasticsearch/nativeaccess/Zstd.java | 12 ++----- .../nativeaccess/lib/ZstdLibrary.java | 6 ++-- .../jdk/JdkCloseableByteBuffer.java | 5 ++- .../nativeaccess/jdk/JdkZstdLibrary.java | 30 +++++++++++----- .../elasticsearch/nativeaccess/ZstdTests.java | 26 +++++++------- 7 files changed, 71 insertions(+), 45 deletions(-) diff --git a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java index e47b17e234705..e987f8042691b 100644 --- a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java +++ b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaCloseableByteBuffer.java @@ -15,7 +15,7 @@ import java.nio.ByteBuffer; class JnaCloseableByteBuffer implements CloseableByteBuffer { - private final Memory memory; + final Memory memory; private final ByteBuffer bufferView; JnaCloseableByteBuffer(int len) { diff --git a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java index f0581633ea969..f2c4a85c8f2bc 100644 --- a/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java +++ b/libs/native/jna/src/main/java/org/elasticsearch/nativeaccess/jna/JnaZstdLibrary.java @@ -10,23 +10,23 @@ import com.sun.jna.Library; import com.sun.jna.Native; +import com.sun.jna.Pointer; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; import org.elasticsearch.nativeaccess.lib.ZstdLibrary; -import java.nio.ByteBuffer; - class JnaZstdLibrary implements ZstdLibrary { private interface NativeFunctions extends Library { long ZSTD_compressBound(int scrLen); - long ZSTD_compress(ByteBuffer dst, int dstLen, ByteBuffer src, int 
srcLen, int compressionLevel); + long ZSTD_compress(Pointer dst, int dstLen, Pointer src, int srcLen, int compressionLevel); boolean ZSTD_isError(long code); String ZSTD_getErrorName(long code); - long ZSTD_decompress(ByteBuffer dst, int dstLen, ByteBuffer src, int srcLen); + long ZSTD_decompress(Pointer dst, int dstLen, Pointer src, int srcLen); } private final NativeFunctions functions; @@ -41,8 +41,18 @@ public long compressBound(int scrLen) { } @Override - public long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel) { - return functions.ZSTD_compress(dst, dst.remaining(), src, src.remaining(), compressionLevel); + public long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel) { + assert dst instanceof JnaCloseableByteBuffer; + assert src instanceof JnaCloseableByteBuffer; + var nativeDst = (JnaCloseableByteBuffer) dst; + var nativeSrc = (JnaCloseableByteBuffer) src; + return functions.ZSTD_compress( + nativeDst.memory.share(dst.buffer().position()), + dst.buffer().remaining(), + nativeSrc.memory.share(src.buffer().position()), + src.buffer().remaining(), + compressionLevel + ); } @Override @@ -56,7 +66,16 @@ public String getErrorName(long code) { } @Override - public long decompress(ByteBuffer dst, ByteBuffer src) { - return functions.ZSTD_decompress(dst, dst.remaining(), src, src.remaining()); + public long decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { + assert dst instanceof JnaCloseableByteBuffer; + assert src instanceof JnaCloseableByteBuffer; + var nativeDst = (JnaCloseableByteBuffer) dst; + var nativeSrc = (JnaCloseableByteBuffer) src; + return functions.ZSTD_decompress( + nativeDst.memory.share(dst.buffer().position()), + dst.buffer().remaining(), + nativeSrc.memory.share(src.buffer().position()), + src.buffer().remaining() + ); } } diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java index 
6a0d348d5251b..60e65383bf9a2 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/Zstd.java @@ -25,13 +25,9 @@ public final class Zstd { * Compress the content of {@code src} into {@code dst} at compression level {@code level}, and return the number of compressed bytes. * {@link ByteBuffer#position()} and {@link ByteBuffer#limit()} of both {@link ByteBuffer}s are left unmodified. */ - public int compress(ByteBuffer dst, ByteBuffer src, int level) { + public int compress(CloseableByteBuffer dst, CloseableByteBuffer src, int level) { Objects.requireNonNull(dst, "Null destination buffer"); Objects.requireNonNull(src, "Null source buffer"); - assert dst.isDirect(); - assert dst.isReadOnly() == false; - assert src.isDirect(); - assert src.isReadOnly() == false; long ret = zstdLib.compress(dst, src, level); if (zstdLib.isError(ret)) { throw new IllegalArgumentException(zstdLib.getErrorName(ret)); @@ -45,13 +41,9 @@ public int compress(ByteBuffer dst, ByteBuffer src, int level) { * Compress the content of {@code src} into {@code dst}, and return the number of decompressed bytes. {@link ByteBuffer#position()} and * {@link ByteBuffer#limit()} of both {@link ByteBuffer}s are left unmodified. 
*/ - public int decompress(ByteBuffer dst, ByteBuffer src) { + public int decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { Objects.requireNonNull(dst, "Null destination buffer"); Objects.requireNonNull(src, "Null source buffer"); - assert dst.isDirect(); - assert dst.isReadOnly() == false; - assert src.isDirect(); - assert src.isReadOnly() == false; long ret = zstdLib.decompress(dst, src); if (zstdLib.isError(ret)) { throw new IllegalArgumentException(zstdLib.getErrorName(ret)); diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java index feb1dbe8e3d61..ea4c8efa5318a 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/lib/ZstdLibrary.java @@ -8,17 +8,17 @@ package org.elasticsearch.nativeaccess.lib; -import java.nio.ByteBuffer; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; public non-sealed interface ZstdLibrary extends NativeLibrary { long compressBound(int scrLen); - long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel); + long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel); boolean isError(long code); String getErrorName(long code); - long decompress(ByteBuffer dst, ByteBuffer src); + long decompress(CloseableByteBuffer dst, CloseableByteBuffer src); } diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java index 97e6bf2f5580a..daa012d35598e 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkCloseableByteBuffer.java @@ -11,15 +11,18 @@ import org.elasticsearch.nativeaccess.CloseableByteBuffer; import 
java.lang.foreign.Arena; +import java.lang.foreign.MemorySegment; import java.nio.ByteBuffer; class JdkCloseableByteBuffer implements CloseableByteBuffer { private final Arena arena; + final MemorySegment segment; private final ByteBuffer bufferView; JdkCloseableByteBuffer(int len) { this.arena = Arena.ofConfined(); - this.bufferView = this.arena.allocate(len).asByteBuffer(); + this.segment = arena.allocate(len); + this.bufferView = segment.asByteBuffer(); } @Override diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java index d193750939b23..e3e972bc19d72 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkZstdLibrary.java @@ -8,12 +8,12 @@ package org.elasticsearch.nativeaccess.jdk; +import org.elasticsearch.nativeaccess.CloseableByteBuffer; import org.elasticsearch.nativeaccess.lib.ZstdLibrary; import java.lang.foreign.FunctionDescriptor; import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; -import java.nio.ByteBuffer; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_BOOLEAN; @@ -49,11 +49,17 @@ public long compressBound(int srcLen) { } @Override - public long compress(ByteBuffer dst, ByteBuffer src, int compressionLevel) { - var nativeDst = MemorySegment.ofBuffer(dst); - var nativeSrc = MemorySegment.ofBuffer(src); + public long compress(CloseableByteBuffer dst, CloseableByteBuffer src, int compressionLevel) { + assert dst instanceof JdkCloseableByteBuffer; + assert src instanceof JdkCloseableByteBuffer; + var nativeDst = (JdkCloseableByteBuffer) dst; + var nativeSrc = (JdkCloseableByteBuffer) src; + var dstSize = dst.buffer().remaining(); + var srcSize = src.buffer().remaining(); + var segmentDst = 
nativeDst.segment.asSlice(dst.buffer().position(), dstSize); + var segmentSrc = nativeSrc.segment.asSlice(src.buffer().position(), srcSize); try { - return (long) compress$mh.invokeExact(nativeDst, dst.remaining(), nativeSrc, src.remaining(), compressionLevel); + return (long) compress$mh.invokeExact(segmentDst, dstSize, segmentSrc, srcSize, compressionLevel); } catch (Throwable t) { throw new AssertionError(t); } @@ -79,11 +85,17 @@ public String getErrorName(long code) { } @Override - public long decompress(ByteBuffer dst, ByteBuffer src) { - var nativeDst = MemorySegment.ofBuffer(dst); - var nativeSrc = MemorySegment.ofBuffer(src); + public long decompress(CloseableByteBuffer dst, CloseableByteBuffer src) { + assert dst instanceof JdkCloseableByteBuffer; + assert src instanceof JdkCloseableByteBuffer; + var nativeDst = (JdkCloseableByteBuffer) dst; + var nativeSrc = (JdkCloseableByteBuffer) src; + var dstSize = dst.buffer().remaining(); + var srcSize = src.buffer().remaining(); + var segmentDst = nativeDst.segment.asSlice(dst.buffer().position(), dstSize); + var segmentSrc = nativeSrc.segment.asSlice(src.buffer().position(), srcSize); try { - return (long) decompress$mh.invokeExact(nativeDst, dst.remaining(), nativeSrc, src.remaining()); + return (long) decompress$mh.invokeExact(segmentDst, dstSize, segmentSrc, srcSize); } catch (Throwable t) { throw new AssertionError(t); } diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java index d051961b06c5f..1282b1fee9206 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/ZstdTests.java @@ -41,16 +41,16 @@ public void testCompressValidation() { var srcBuf = src.buffer(); var dstBuf = dst.buffer(); - var npe1 = expectThrows(NullPointerException.class, () -> zstd.compress(null, srcBuf, 0)); + var npe1 = 
expectThrows(NullPointerException.class, () -> zstd.compress(null, src, 0)); assertThat(npe1.getMessage(), equalTo("Null destination buffer")); - var npe2 = expectThrows(NullPointerException.class, () -> zstd.compress(dstBuf, null, 0)); + var npe2 = expectThrows(NullPointerException.class, () -> zstd.compress(dst, null, 0)); assertThat(npe2.getMessage(), equalTo("Null source buffer")); // dst capacity too low for (int i = 0; i < srcBuf.remaining(); ++i) { srcBuf.put(i, randomByte()); } - var e = expectThrows(IllegalArgumentException.class, () -> zstd.compress(dstBuf, srcBuf, 0)); + var e = expectThrows(IllegalArgumentException.class, () -> zstd.compress(dst, src, 0)); assertThat(e.getMessage(), equalTo("Destination buffer is too small")); } } @@ -64,21 +64,21 @@ public void testDecompressValidation() { var originalBuf = original.buffer(); var compressedBuf = compressed.buffer(); - var npe1 = expectThrows(NullPointerException.class, () -> zstd.decompress(null, originalBuf)); + var npe1 = expectThrows(NullPointerException.class, () -> zstd.decompress(null, original)); assertThat(npe1.getMessage(), equalTo("Null destination buffer")); - var npe2 = expectThrows(NullPointerException.class, () -> zstd.decompress(compressedBuf, null)); + var npe2 = expectThrows(NullPointerException.class, () -> zstd.decompress(compressed, null)); assertThat(npe2.getMessage(), equalTo("Null source buffer")); // Invalid compressed format for (int i = 0; i < originalBuf.remaining(); ++i) { originalBuf.put(i, (byte) i); } - var e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(compressedBuf, originalBuf)); + var e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(compressed, original)); assertThat(e.getMessage(), equalTo("Unknown frame descriptor")); - int compressedLength = zstd.compress(compressedBuf, originalBuf, 0); + int compressedLength = zstd.compress(compressed, original, 0); compressedBuf.limit(compressedLength); - e = 
expectThrows(IllegalArgumentException.class, () -> zstd.decompress(restored.buffer(), compressedBuf)); + e = expectThrows(IllegalArgumentException.class, () -> zstd.decompress(restored, compressed)); assertThat(e.getMessage(), equalTo("Destination buffer is too small")); } @@ -109,9 +109,9 @@ private void doTestRoundtrip(byte[] data) { var restored = nativeAccess.newBuffer(data.length) ) { original.buffer().put(0, data); - int compressedLength = zstd.compress(compressed.buffer(), original.buffer(), randomIntBetween(-3, 9)); + int compressedLength = zstd.compress(compressed, original, randomIntBetween(-3, 9)); compressed.buffer().limit(compressedLength); - int decompressedLength = zstd.decompress(restored.buffer(), compressed.buffer()); + int decompressedLength = zstd.decompress(restored, compressed); assertThat(restored.buffer(), equalTo(original.buffer())); assertThat(decompressedLength, equalTo(data.length)); } @@ -127,15 +127,15 @@ private void doTestRoundtrip(byte[] data) { original.buffer().put(decompressedOffset, data); original.buffer().position(decompressedOffset); compressed.buffer().position(compressedOffset); - int compressedLength = zstd.compress(compressed.buffer(), original.buffer(), randomIntBetween(-3, 9)); + int compressedLength = zstd.compress(compressed, original, randomIntBetween(-3, 9)); compressed.buffer().limit(compressedOffset + compressedLength); restored.buffer().position(decompressedOffset); - int decompressedLength = zstd.decompress(restored.buffer(), compressed.buffer()); + int decompressedLength = zstd.decompress(restored, compressed); + assertThat(decompressedLength, equalTo(data.length)); assertThat( restored.buffer().slice(decompressedOffset, data.length), equalTo(original.buffer().slice(decompressedOffset, data.length)) ); - assertThat(decompressedLength, equalTo(data.length)); } } } From 49cf3cb37b352ff8d50bf893f492e041d87cf6f6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Mar 2024 12:24:45 -0700 Subject: [PATCH 
162/214] Add rate aggregation function (#106703) This PR introduces a rate aggregation function to ESQL, with two main changes: 1. Definition of the grouping state for the rate aggregation function: - For raw input, the function expects data to arrive in descending timestamp order without gaps; hence, perform a reduction with each incoming entry. Each grouping should consist of at most two entries: one for the starting time and one for the ending time. - For intermediate input, the function buffers data as they can arrive out of order, although non-overlapping. This shouldn't have significant issues, as we expect at most two entries per participating pipeline. - The intermediate output consists of three blocks: timestamps, values, and resets. Both timestamps and values can contain multiple entries sorted in descending order by timestamp. - This rate function does not support non-grouping aggregation. However, I can enable it if we think otherwise. 2. Modifies the GroupingAggregatorImplementer code generator to include the timestamp vector block. I explored several options to generate multiple input blocks. However, both the generated code and the code generator are much more complicated in a generic solution. And it's unlikely that we will need another function that requires multiple input blocks. Hence, I decided to tweak this class to append the timestamps long vector block when specified. 
--- .../compute/ann/GroupingAggregator.java | 5 + x-pack/plugin/esql/compute/build.gradle | 20 +- ...AggregatorFunctionSupplierImplementer.java | 23 +- .../compute/gen/AggregatorProcessor.java | 16 +- .../gen/GroupingAggregatorImplementer.java | 47 ++- .../aggregation/RateDoubleAggregator.java | 277 +++++++++++++++++ .../aggregation/RateIntAggregator.java | 278 +++++++++++++++++ .../aggregation/RateLongAggregator.java | 277 +++++++++++++++++ .../RateDoubleAggregatorFunctionSupplier.java | 41 +++ .../RateDoubleGroupingAggregatorFunction.java | 225 ++++++++++++++ .../RateIntAggregatorFunctionSupplier.java | 41 +++ .../RateIntGroupingAggregatorFunction.java | 225 ++++++++++++++ .../RateLongAggregatorFunctionSupplier.java | 41 +++ .../RateLongGroupingAggregatorFunction.java | 225 ++++++++++++++ .../aggregation/X-RateAggregator.java.st | 280 ++++++++++++++++++ .../TimeSeriesSortedSourceOperatorTests.java | 137 +++++++++ 16 files changed, 2145 insertions(+), 13 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java create mode 100644 
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java create mode 100644 x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java index bb7b2cc888c2c..7e92fc5c2734e 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/GroupingAggregator.java @@ -17,4 +17,9 @@ public @interface GroupingAggregator { IntermediateState[] value() default {}; + + /** + * If {@code true} then the @timestamp LongVector will be appended to the input blocks of the aggregation function. 
+ */ + boolean includeTimestamps() default false; } diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index e5d076aa0e041..d04daf6631447 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -386,7 +386,25 @@ tasks.named('stringTemplates').configure { it.inputFile = valuesAggregatorInputFile it.outputFile = "org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java" } - File multivalueDedupeInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st") + + File rateAggregatorInputFile = file("src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st") + template { + it.properties = intProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateIntAggregator.java" + } + template { + it.properties = longProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateLongAggregator.java" + } + template { + it.properties = doubleProperties + it.inputFile = rateAggregatorInputFile + it.outputFile = "org/elasticsearch/compute/aggregation/RateDoubleAggregator.java" + } + + File multivalueDedupeInputFile = file("src/main/java/org/elasticsearch/compute/operator/X-MultivalueDedupe.java.st") template { it.properties = intProperties it.inputFile = multivalueDedupeInputFile diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java index a9bea3105ee10..3f031db2978f9 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorFunctionSupplierImplementer.java @@ 
-54,8 +54,12 @@ public AggregatorFunctionSupplierImplementer( this.groupingAggregatorImplementer = groupingAggregatorImplementer; Set createParameters = new LinkedHashSet<>(); - createParameters.addAll(aggregatorImplementer.createParameters()); - createParameters.addAll(groupingAggregatorImplementer.createParameters()); + if (aggregatorImplementer != null) { + createParameters.addAll(aggregatorImplementer.createParameters()); + } + if (groupingAggregatorImplementer != null) { + createParameters.addAll(groupingAggregatorImplementer.createParameters()); + } this.createParameters = new ArrayList<>(createParameters); this.createParameters.add(0, new Parameter(LIST_INTEGER, "channels")); @@ -84,7 +88,11 @@ private TypeSpec type() { createParameters.stream().forEach(p -> p.declareField(builder)); builder.addMethod(ctor()); - builder.addMethod(aggregator()); + if (aggregatorImplementer != null) { + builder.addMethod(aggregator()); + } else { + builder.addMethod(unsupportedNonGroupingAggregator()); + } builder.addMethod(groupingAggregator()); builder.addMethod(describe()); return builder.build(); @@ -96,6 +104,15 @@ private MethodSpec ctor() { return builder.build(); } + private MethodSpec unsupportedNonGroupingAggregator() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") + .addParameter(DRIVER_CONTEXT, "driverContext") + .returns(Types.AGGREGATOR_FUNCTION); + builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC); + builder.addStatement("throw new UnsupportedOperationException($S)", "non-grouping aggregator is not supported"); + return builder.build(); + } + private MethodSpec aggregator() { MethodSpec.Builder builder = MethodSpec.methodBuilder("aggregator") .addParameter(DRIVER_CONTEXT, "driverContext") diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index 
b724ee9152ca8..d07b24047b7e2 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -86,17 +86,21 @@ public boolean process(Set set, RoundEnvironment roundEnv write(aggClass, "aggregator", implementer.sourceFile(), env); } GroupingAggregatorImplementer groupingAggregatorImplementer = null; - if (aggClass.getAnnotation(Aggregator.class) != null) { - assert aggClass.getAnnotation(GroupingAggregator.class) != null; + if (aggClass.getAnnotation(GroupingAggregator.class) != null) { IntermediateState[] intermediateState = aggClass.getAnnotation(GroupingAggregator.class).value(); - if (intermediateState.length == 0) { + if (intermediateState.length == 0 && aggClass.getAnnotation(Aggregator.class) != null) { intermediateState = aggClass.getAnnotation(Aggregator.class).value(); } - - groupingAggregatorImplementer = new GroupingAggregatorImplementer(env.getElementUtils(), aggClass, intermediateState); + boolean includeTimestamps = aggClass.getAnnotation(GroupingAggregator.class).includeTimestamps(); + groupingAggregatorImplementer = new GroupingAggregatorImplementer( + env.getElementUtils(), + aggClass, + intermediateState, + includeTimestamps + ); write(aggClass, "grouping aggregator", groupingAggregatorImplementer.sourceFile(), env); } - if (implementer != null && groupingAggregatorImplementer != null) { + if (implementer != null || groupingAggregatorImplementer != null) { write( aggClass, "aggregator function supplier", diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index cc55e19b7d421..1be01f445691d 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java 
+++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -47,6 +47,8 @@ import static org.elasticsearch.compute.gen.Types.INT_VECTOR; import static org.elasticsearch.compute.gen.Types.LIST_AGG_FUNC_DESC; import static org.elasticsearch.compute.gen.Types.LIST_INTEGER; +import static org.elasticsearch.compute.gen.Types.LONG_BLOCK; +import static org.elasticsearch.compute.gen.Types.LONG_VECTOR; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.SEEN_GROUP_IDS; @@ -71,8 +73,14 @@ public class GroupingAggregatorImplementer { private final List createParameters; private final ClassName implementation; private final List intermediateState; + private final boolean includeTimestampVector; - public GroupingAggregatorImplementer(Elements elements, TypeElement declarationType, IntermediateState[] interStateAnno) { + public GroupingAggregatorImplementer( + Elements elements, + TypeElement declarationType, + IntermediateState[] interStateAnno, + boolean includeTimestampVector + ) { this.declarationType = declarationType; this.init = findRequiredMethod(declarationType, new String[] { "init", "initGrouping" }, e -> true); @@ -103,6 +111,7 @@ public GroupingAggregatorImplementer(Elements elements, TypeElement declarationT intermediateState = Arrays.stream(interStateAnno) .map(AggregatorImplementer.IntermediateStateDesc::newIntermediateStateDesc) .toList(); + this.includeTimestampVector = includeTimestampVector; } public ClassName implementation() { @@ -264,15 +273,24 @@ private MethodSpec prepareProcessPage() { builder.addStatement("$T valuesBlock = page.getBlock(channels.get(0))", valueBlockType(init, combine)); builder.addStatement("$T valuesVector = valuesBlock.asVector()", valueVectorType(init, combine)); + if (includeTimestampVector) { + builder.addStatement("$T timestampsBlock = page.getBlock(channels.get(1))", LONG_BLOCK); + builder.addStatement("$T 
timestampsVector = timestampsBlock.asVector()", LONG_VECTOR); + + builder.beginControlFlow("if (timestampsVector == null) "); + builder.addStatement("throw new IllegalStateException($S)", "expected @timestamp vector; but got a block"); + builder.endControlFlow(); + } builder.beginControlFlow("if (valuesVector == null)"); + String extra = includeTimestampVector ? ", timestampsVector" : ""; { builder.beginControlFlow("if (valuesBlock.mayHaveNulls())"); builder.addStatement("state.enableGroupIdTracking(seenGroupIds)"); builder.endControlFlow(); - builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesBlock)"))); + builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesBlock$L)", extra))); } builder.endControlFlow(); - builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesVector)"))); + builder.addStatement("return $L", addInput(b -> b.addStatement("addRawInput(positionOffset, groupIds, valuesVector$L)", extra))); return builder.build(); } @@ -308,6 +326,9 @@ private MethodSpec addRawInputLoop(TypeName groupsType, TypeName valuesType) { MethodSpec.Builder builder = MethodSpec.methodBuilder(methodName); builder.addModifiers(Modifier.PRIVATE); builder.addParameter(TypeName.INT, "positionOffset").addParameter(groupsType, "groups").addParameter(valuesType, "values"); + if (includeTimestampVector) { + builder.addParameter(LONG_VECTOR, "timestamps"); + } if (valuesIsBytesRef) { // Add bytes_ref scratch var that will be used for bytes_ref blocks/vectors builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); @@ -354,6 +375,10 @@ private void combineRawInput(MethodSpec.Builder builder, String blockVariable, S combineRawInputForBytesRef(builder, blockVariable, offsetVariable); return; } + if (includeTimestampVector) { + combineRawInputWithTimestamp(builder, offsetVariable); + return; + } TypeName valueType = 
TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType()); if (valueType.isPrimitive() == false) { throw new IllegalArgumentException("second parameter to combine must be a primitive"); @@ -403,6 +428,22 @@ private void combineRawInputForVoid( ); } + private void combineRawInputWithTimestamp(MethodSpec.Builder builder, String offsetVariable) { + TypeName valueType = TypeName.get(combine.getParameters().get(combine.getParameters().size() - 1).asType()); + String blockType = valueType.toString().substring(0, 1).toUpperCase(Locale.ROOT) + valueType.toString().substring(1); + if (offsetVariable.contains(" + ")) { + builder.addStatement("var valuePosition = $L", offsetVariable); + offsetVariable = "valuePosition"; + } + builder.addStatement( + "$T.combine(state, groupId, timestamps.getLong($L), values.get$L($L))", + declarationType, + offsetVariable, + blockType, + offsetVariable + ); + } + private void combineRawInputForBytesRef(MethodSpec.Builder builder, String blockVariable, String offsetVariable) { // scratch is a BytesRef var that must have been defined before the iteration starts builder.addStatement("$T.combine(state, groupId, $L.getBytesRef($L, scratch))", declarationType, blockVariable, offsetVariable); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java new file mode 100644 index 0000000000000..016bf9387ca4b --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * A rate grouping aggregation definition for double. + * This class is generated. Edit `X-RateAggregator.java.st` instead. + */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "DOUBLE_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateDoubleAggregator { + public static DoubleRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { + // TODO: pass BlockFactory instead bigArrays so we can use the breaker + return new DoubleRateGroupingState(bigArrays, unitInMillis); + } + + public static void combine(DoubleRateGroupingState current, int groupId, long timestamp, double value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + DoubleRateGroupingState current, + int groupId, + LongBlock timestamps, + DoubleBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + DoubleRateGroupingState current, + int currentGroupId, // make the 
stylecheck happy + DoubleRateGroupingState state, + int statePosition + ) { + throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + } + + public static Block evaluateFinal(DoubleRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class DoubleRateState implements Accountable { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(DoubleRateState.class); + final long[] timestamps; // descending order + final double[] values; + double reset = 0; + + DoubleRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new double[initialSize]; + } + + DoubleRateState(long[] ts, double[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private double dv(double v0, double v1) { + // counter reset detection + return v0 > v1 ? v1 : v1 - v0; + } + + void append(long t, double v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_USAGE; + } + } + + public static final class DoubleRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + + DoubleRateGroupingState(BigArrays bigArrays, long unitInMillis) { + this.bigArrays = bigArrays; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void append(int groupId, long timestamp, double value) { + ensureCapacity(groupId); + var state = 
states.get(groupId); + if (state == null) { + state = new DoubleRateState(new long[] { timestamp }, new double[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + state = new DoubleRateState(new long[] { state.timestamps[0], timestamp }, new double[] { state.values[0], value }); + states.set(groupId, state); + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new DoubleRateState(valueCount); + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getDouble(firstIndex + i); + } + } else { + var newState = new DoubleRateState(state.entries() + valueCount); + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + } + state.reset += reset; + } + + void merge(DoubleRateState curr, DoubleRateState dst, int firstIndex, int rightCount, LongBlock timestamps, DoubleBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getDouble(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = 
timestamps.getLong(firstIndex + j); + dst.values[k] = values.getDouble(firstIndex + j); + ++k; + ++j; + } + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed(); + } + + @Override + public void close() { + Releasables.close(states); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + DoubleBlock.Builder values = blockFactory.newDoubleBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (double v : state.values) { + values.appendDouble(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java new file mode 100644 index 0000000000000..fbf43f7d72c46 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * A rate grouping aggregation definition for int. + * This class is generated. Edit `X-RateAggregator.java.st` instead. + */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "INT_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateIntAggregator { + public static IntRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { + // TODO: pass BlockFactory instead bigArrays so we can use the breaker + return new IntRateGroupingState(bigArrays, unitInMillis); + } + + public static void combine(IntRateGroupingState current, int groupId, long timestamp, int value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + IntRateGroupingState current, + int groupId, + LongBlock timestamps, + IntBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + IntRateGroupingState current, + int currentGroupId, 
// make the stylecheck happy + IntRateGroupingState state, + int statePosition + ) { + throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + } + + public static Block evaluateFinal(IntRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class IntRateState implements Accountable { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(IntRateState.class); + final long[] timestamps; // descending order + final int[] values; + double reset = 0; + + IntRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new int[initialSize]; + } + + IntRateState(long[] ts, int[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private int dv(int v0, int v1) { + // counter reset detection + return v0 > v1 ? v1 : v1 - v0; + } + + void append(long t, int v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_USAGE; + } + } + + public static final class IntRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + + IntRateGroupingState(BigArrays bigArrays, long unitInMillis) { + this.bigArrays = bigArrays; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void append(int groupId, long timestamp, int value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == 
null) { + state = new IntRateState(new long[] { timestamp }, new int[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + state = new IntRateState(new long[] { state.timestamps[0], timestamp }, new int[] { state.values[0], value }); + states.set(groupId, state); + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new IntRateState(valueCount); + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getInt(firstIndex + i); + } + } else { + var newState = new IntRateState(state.entries() + valueCount); + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + } + state.reset += reset; + } + + void merge(IntRateState curr, IntRateState dst, int firstIndex, int rightCount, LongBlock timestamps, IntBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getInt(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = 
values.getInt(firstIndex + j); + ++k; + ++j; + } + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed(); + } + + @Override + public void close() { + Releasables.close(states); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + IntBlock.Builder values = blockFactory.newIntBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (int v : state.values) { + values.appendInt(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java new file mode 100644 index 0000000000000..b5d0dfc8aabdb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * A rate grouping aggregation definition for long. + * This class is generated. Edit `X-RateAggregator.java.st` instead. + */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "LONG_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class RateLongAggregator { + public static LongRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { + // TODO: pass BlockFactory instead bigArrays so we can use the breaker + return new LongRateGroupingState(bigArrays, unitInMillis); + } + + public static void combine(LongRateGroupingState current, int groupId, long timestamp, long value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + LongRateGroupingState current, + int groupId, + LongBlock timestamps, + LongBlock values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + LongRateGroupingState current, + int currentGroupId, // make the stylecheck happy + 
LongRateGroupingState state, + int statePosition + ) { + throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + } + + public static Block evaluateFinal(LongRateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class LongRateState implements Accountable { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(LongRateState.class); + final long[] timestamps; // descending order + final long[] values; + double reset = 0; + + LongRateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new long[initialSize]; + } + + LongRateState(long[] ts, long[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private long dv(long v0, long v1) { + // counter reset detection + return v0 > v1 ? v1 : v1 - v0; + } + + void append(long t, long v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_USAGE; + } + } + + public static final class LongRateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray states; + private final long unitInMillis; + private final BigArrays bigArrays; + + LongRateGroupingState(BigArrays bigArrays, long unitInMillis) { + this.bigArrays = bigArrays; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void append(int groupId, long timestamp, long value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = 
new LongRateState(new long[] { timestamp }, new long[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + state = new LongRateState(new long[] { state.timestamps[0], timestamp }, new long[] { state.values[0], value }); + states.set(groupId, state); + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new LongRateState(valueCount); + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.getLong(firstIndex + i); + } + } else { + var newState = new LongRateState(state.entries() + valueCount); + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + } + state.reset += reset; + } + + void merge(LongRateState curr, LongRateState dst, int firstIndex, int rightCount, LongBlock timestamps, LongBlock values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.getLong(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.getLong(firstIndex + 
j); + ++k; + ++j; + } + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed(); + } + + @Override + public void close() { + Releasables.close(states); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + LongBlock.Builder values = blockFactory.newLongBlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for (long v : state.values) { + values.appendLong(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? 
states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? + rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..8806e1ed865c2 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link RateDoubleAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class RateDoubleAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateDoubleAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateDoubleGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of doubles"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..608221614c483 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
package org.elasticsearch.compute.aggregation;

import java.lang.Integer;
import java.lang.Override;
import java.lang.String;
import java.lang.StringBuilder;
import java.util.List;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.DoubleVector;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;

/**
 * {@link GroupingAggregatorFunction} implementation for {@link RateDoubleAggregator}.
 * This class is generated. Do not edit it.
 */
public final class RateDoubleGroupingAggregatorFunction implements GroupingAggregatorFunction {
  private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
      new IntermediateStateDesc("timestamps", ElementType.LONG),
      new IntermediateStateDesc("values", ElementType.DOUBLE),
      new IntermediateStateDesc("resets", ElementType.DOUBLE)  );

  private final RateDoubleAggregator.DoubleRateGroupingState state;

  private final List<Integer> channels;

  private final DriverContext driverContext;

  private final long unitInMillis;

  public RateDoubleGroupingAggregatorFunction(List<Integer> channels,
      RateDoubleAggregator.DoubleRateGroupingState state, DriverContext driverContext,
      long unitInMillis) {
    this.channels = channels;
    this.state = state;
    this.driverContext = driverContext;
    this.unitInMillis = unitInMillis;
  }

  public static RateDoubleGroupingAggregatorFunction create(List<Integer> channels,
      DriverContext driverContext, long unitInMillis) {
    return new RateDoubleGroupingAggregatorFunction(channels, RateDoubleAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis);
  }

  public static List<IntermediateStateDesc> intermediateStateDesc() {
    return INTERMEDIATE_STATE_DESC;
  }

  @Override
  public int intermediateBlockCount() {
    return INTERMEDIATE_STATE_DESC.size();
  }

  @Override
  public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds,
      Page page) {
    DoubleBlock valuesBlock = page.getBlock(channels.get(0));
    DoubleVector valuesVector = valuesBlock.asVector();
    LongBlock timestampsBlock = page.getBlock(channels.get(1));
    LongVector timestampsVector = timestampsBlock.asVector();
    if (timestampsVector == null)  {
      throw new IllegalStateException("expected @timestamp vector; but got a block");
    }
    if (valuesVector == null) {
      if (valuesBlock.mayHaveNulls()) {
        state.enableGroupIdTracking(seenGroupIds);
      }
      return new GroupingAggregatorFunction.AddInput() {
        @Override
        public void add(int positionOffset, IntBlock groupIds) {
          addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector);
        }

        @Override
        public void add(int positionOffset, IntVector groupIds) {
          addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector);
        }
      };
    }
    return new GroupingAggregatorFunction.AddInput() {
      @Override
      public void add(int positionOffset, IntBlock groupIds) {
        addRawInput(positionOffset, groupIds, valuesVector, timestampsVector);
      }

      @Override
      public void add(int positionOffset, IntVector groupIds) {
        addRawInput(positionOffset, groupIds, valuesVector, timestampsVector);
      }
    };
  }

  private void addRawInput(int positionOffset, IntVector groups, DoubleBlock values,
      LongVector timestamps) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = Math.toIntExact(groups.getInt(groupPosition));
      if (values.isNull(groupPosition + positionOffset)) {
        continue;
      }
      int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset);
      int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset);
      for (int v = valuesStart; v < valuesEnd; v++) {
        RateDoubleAggregator.combine(state, groupId, timestamps.getLong(v), values.getDouble(v));
      }
    }
  }

  private void addRawInput(int positionOffset, IntVector groups, DoubleVector values,
      LongVector timestamps) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = Math.toIntExact(groups.getInt(groupPosition));
      var valuePosition = groupPosition + positionOffset;
      RateDoubleAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getDouble(valuePosition));
    }
  }

  private void addRawInput(int positionOffset, IntBlock groups, DoubleBlock values,
      LongVector timestamps) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = Math.toIntExact(groups.getInt(g));
        if (values.isNull(groupPosition + positionOffset)) {
          continue;
        }
        int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset);
        int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset);
        for (int v = valuesStart; v < valuesEnd; v++) {
          RateDoubleAggregator.combine(state, groupId, timestamps.getLong(v), values.getDouble(v));
        }
      }
    }
  }

  private void addRawInput(int positionOffset, IntBlock groups, DoubleVector values,
      LongVector timestamps) {
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      if (groups.isNull(groupPosition)) {
        continue;
      }
      int groupStart = groups.getFirstValueIndex(groupPosition);
      int groupEnd = groupStart + groups.getValueCount(groupPosition);
      for (int g = groupStart; g < groupEnd; g++) {
        int groupId = Math.toIntExact(groups.getInt(g));
        var valuePosition = groupPosition + positionOffset;
        RateDoubleAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getDouble(valuePosition));
      }
    }
  }

  @Override
  public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    assert channels.size() == intermediateBlockCount();
    Block timestampsUncast = page.getBlock(channels.get(0));
    if (timestampsUncast.areAllValuesNull()) {
      return;
    }
    LongBlock timestamps = (LongBlock) timestampsUncast;
    Block valuesUncast = page.getBlock(channels.get(1));
    if (valuesUncast.areAllValuesNull()) {
      return;
    }
    DoubleBlock values = (DoubleBlock) valuesUncast;
    Block resetsUncast = page.getBlock(channels.get(2));
    if (resetsUncast.areAllValuesNull()) {
      return;
    }
    DoubleVector resets = ((DoubleBlock) resetsUncast).asVector();
    assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount();
    for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
      int groupId = Math.toIntExact(groups.getInt(groupPosition));
      RateDoubleAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset);
    }
  }

  @Override
  public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) {
    if (input.getClass() != getClass()) {
      throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass());
    }
    RateDoubleAggregator.DoubleRateGroupingState inState = ((RateDoubleGroupingAggregatorFunction) input).state;
    state.enableGroupIdTracking(new SeenGroupIds.Empty());
    RateDoubleAggregator.combineStates(state, groupId, inState, position);
  }

  @Override
  public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
    state.toIntermediate(blocks, offset, selected, driverContext);
  }

  @Override
  public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
      DriverContext driverContext) {
    blocks[offset] = RateDoubleAggregator.evaluateFinal(state, selected, driverContext);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName()).append("[");
    sb.append("channels=").append(channels);
    sb.append("]");
    return sb.toString();
  }

  @Override
  public void close() {
    state.close();
  }
}
+ */ +public final class RateIntAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateIntAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateIntGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateIntGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of ints"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..df954d92a6d2a --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link RateIntAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateIntGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.INT), + new IntermediateStateDesc("resets", ElementType.DOUBLE) ); + + private final RateIntAggregator.IntRateGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final long unitInMillis; + + public RateIntGroupingAggregatorFunction(List channels, + RateIntAggregator.IntRateGroupingState state, DriverContext driverContext, + long unitInMillis) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.unitInMillis = unitInMillis; + } + + public static RateIntGroupingAggregatorFunction create(List channels, + DriverContext driverContext, long unitInMillis) { + return new RateIntGroupingAggregatorFunction(channels, RateIntAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis); + } + + public static List intermediateStateDesc() { + return 
INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + IntBlock valuesBlock = page.getBlock(channels.get(0)); + IntVector valuesVector = valuesBlock.asVector(); + LongBlock timestampsBlock = page.getBlock(channels.get(1)); + LongVector timestampsVector = timestampsBlock.asVector(); + if (timestampsVector == null) { + throw new IllegalStateException("expected @timestamp vector; but got a block"); + } + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, IntBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
RateIntAggregator.combine(state, groupId, timestamps.getLong(v), values.getInt(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, IntVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + var valuePosition = groupPosition + positionOffset; + RateIntAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getInt(valuePosition)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateIntAggregator.combine(state, groupId, timestamps.getLong(v), values.getInt(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, IntVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + var valuePosition = groupPosition + positionOffset; + RateIntAggregator.combine(state, groupId, 
timestamps.getLong(valuePosition), values.getInt(valuePosition)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + IntBlock values = (IntBlock) valuesUncast; + Block resetsUncast = page.getBlock(channels.get(2)); + if (resetsUncast.areAllValuesNull()) { + return; + } + DoubleVector resets = ((DoubleBlock) resetsUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + RateIntAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + RateIntAggregator.IntRateGroupingState inState = ((RateIntGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + RateIntAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + 
DriverContext driverContext) { + blocks[offset] = RateIntAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java new file mode 100644 index 0000000000000..b8100dbbe4455 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongAggregatorFunctionSupplier.java @@ -0,0 +1,41 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.util.List; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link AggregatorFunctionSupplier} implementation for {@link RateLongAggregator}. + * This class is generated. Do not edit it. 
+ */ +public final class RateLongAggregatorFunctionSupplier implements AggregatorFunctionSupplier { + private final List channels; + + private final long unitInMillis; + + public RateLongAggregatorFunctionSupplier(List channels, long unitInMillis) { + this.channels = channels; + this.unitInMillis = unitInMillis; + } + + @Override + public AggregatorFunction aggregator(DriverContext driverContext) { + throw new UnsupportedOperationException("non-grouping aggregator is not supported"); + } + + @Override + public RateLongGroupingAggregatorFunction groupingAggregator(DriverContext driverContext) { + return RateLongGroupingAggregatorFunction.create(channels, driverContext, unitInMillis); + } + + @Override + public String describe() { + return "rate of longs"; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java new file mode 100644 index 0000000000000..fb536465ed973 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java @@ -0,0 +1,225 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.compute.aggregation; + +import java.lang.Integer; +import java.lang.Override; +import java.lang.String; +import java.lang.StringBuilder; +import java.util.List; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; + +/** + * {@link GroupingAggregatorFunction} implementation for {@link RateLongAggregator}. + * This class is generated. Do not edit it. + */ +public final class RateLongGroupingAggregatorFunction implements GroupingAggregatorFunction { + private static final List INTERMEDIATE_STATE_DESC = List.of( + new IntermediateStateDesc("timestamps", ElementType.LONG), + new IntermediateStateDesc("values", ElementType.LONG), + new IntermediateStateDesc("resets", ElementType.DOUBLE) ); + + private final RateLongAggregator.LongRateGroupingState state; + + private final List channels; + + private final DriverContext driverContext; + + private final long unitInMillis; + + public RateLongGroupingAggregatorFunction(List channels, + RateLongAggregator.LongRateGroupingState state, DriverContext driverContext, + long unitInMillis) { + this.channels = channels; + this.state = state; + this.driverContext = driverContext; + this.unitInMillis = unitInMillis; + } + + public static RateLongGroupingAggregatorFunction create(List channels, + DriverContext driverContext, long unitInMillis) { + return new RateLongGroupingAggregatorFunction(channels, RateLongAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis); + } + + public static List intermediateStateDesc() { + return 
INTERMEDIATE_STATE_DESC; + } + + @Override + public int intermediateBlockCount() { + return INTERMEDIATE_STATE_DESC.size(); + } + + @Override + public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, + Page page) { + LongBlock valuesBlock = page.getBlock(channels.get(0)); + LongVector valuesVector = valuesBlock.asVector(); + LongBlock timestampsBlock = page.getBlock(channels.get(1)); + LongVector timestampsVector = timestampsBlock.asVector(); + if (timestampsVector == null) { + throw new IllegalStateException("expected @timestamp vector; but got a block"); + } + if (valuesVector == null) { + if (valuesBlock.mayHaveNulls()) { + state.enableGroupIdTracking(seenGroupIds); + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesBlock, timestampsVector); + } + }; + } + return new GroupingAggregatorFunction.AddInput() { + @Override + public void add(int positionOffset, IntBlock groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + + @Override + public void add(int positionOffset, IntVector groupIds) { + addRawInput(positionOffset, groupIds, valuesVector, timestampsVector); + } + }; + } + + private void addRawInput(int positionOffset, IntVector groups, LongBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + 
RateLongAggregator.combine(state, groupId, timestamps.getLong(v), values.getLong(v)); + } + } + } + + private void addRawInput(int positionOffset, IntVector groups, LongVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + var valuePosition = groupPosition + positionOffset; + RateLongAggregator.combine(state, groupId, timestamps.getLong(valuePosition), values.getLong(valuePosition)); + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongBlock values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + if (values.isNull(groupPosition + positionOffset)) { + continue; + } + int valuesStart = values.getFirstValueIndex(groupPosition + positionOffset); + int valuesEnd = valuesStart + values.getValueCount(groupPosition + positionOffset); + for (int v = valuesStart; v < valuesEnd; v++) { + RateLongAggregator.combine(state, groupId, timestamps.getLong(v), values.getLong(v)); + } + } + } + } + + private void addRawInput(int positionOffset, IntBlock groups, LongVector values, + LongVector timestamps) { + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + if (groups.isNull(groupPosition)) { + continue; + } + int groupStart = groups.getFirstValueIndex(groupPosition); + int groupEnd = groupStart + groups.getValueCount(groupPosition); + for (int g = groupStart; g < groupEnd; g++) { + int groupId = Math.toIntExact(groups.getInt(g)); + var valuePosition = groupPosition + positionOffset; + RateLongAggregator.combine(state, groupId, 
timestamps.getLong(valuePosition), values.getLong(valuePosition)); + } + } + } + + @Override + public void addIntermediateInput(int positionOffset, IntVector groups, Page page) { + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + assert channels.size() == intermediateBlockCount(); + Block timestampsUncast = page.getBlock(channels.get(0)); + if (timestampsUncast.areAllValuesNull()) { + return; + } + LongBlock timestamps = (LongBlock) timestampsUncast; + Block valuesUncast = page.getBlock(channels.get(1)); + if (valuesUncast.areAllValuesNull()) { + return; + } + LongBlock values = (LongBlock) valuesUncast; + Block resetsUncast = page.getBlock(channels.get(2)); + if (resetsUncast.areAllValuesNull()) { + return; + } + DoubleVector resets = ((DoubleBlock) resetsUncast).asVector(); + assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == resets.getPositionCount(); + for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) { + int groupId = Math.toIntExact(groups.getInt(groupPosition)); + RateLongAggregator.combineIntermediate(state, groupId, timestamps, values, resets.getDouble(groupPosition + positionOffset), groupPosition + positionOffset); + } + } + + @Override + public void addIntermediateRowInput(int groupId, GroupingAggregatorFunction input, int position) { + if (input.getClass() != getClass()) { + throw new IllegalArgumentException("expected " + getClass() + "; got " + input.getClass()); + } + RateLongAggregator.LongRateGroupingState inState = ((RateLongGroupingAggregatorFunction) input).state; + state.enableGroupIdTracking(new SeenGroupIds.Empty()); + RateLongAggregator.combineStates(state, groupId, inState, position); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) { + state.toIntermediate(blocks, offset, selected, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, IntVector selected, + 
DriverContext driverContext) { + blocks[offset] = RateLongAggregator.evaluateFinal(state, selected, driverContext); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()).append("["); + sb.append("channels=").append(channels); + sb.append("]"); + return sb.toString(); + } + + @Override + public void close() { + state.close(); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st new file mode 100644 index 0000000000000..9ace663fec990 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -0,0 +1,280 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.ObjectArray; +import org.elasticsearch.compute.ann.GroupingAggregator; +import org.elasticsearch.compute.ann.IntermediateState; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +$if(int)$ +import org.elasticsearch.compute.data.IntBlock; +$endif$ +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; + +/** + * A rate grouping aggregation definition for $type$. + * This class is generated. Edit `X-RateAggregator.java.st` instead. 
+ */ +@GroupingAggregator( + includeTimestamps = true, + value = { + @IntermediateState(name = "timestamps", type = "LONG_BLOCK"), + @IntermediateState(name = "values", type = "$TYPE$_BLOCK"), + @IntermediateState(name = "resets", type = "DOUBLE") } +) +public class Rate$Type$Aggregator { + public static $Type$RateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { + // TODO: pass BlockFactory instead bigArrays so we can use the breaker + return new $Type$RateGroupingState(bigArrays, unitInMillis); + } + + public static void combine($Type$RateGroupingState current, int groupId, long timestamp, $type$ value) { + current.append(groupId, timestamp, value); + } + + public static void combineIntermediate( + $Type$RateGroupingState current, + int groupId, + LongBlock timestamps, + $Type$Block values, + double reset, + int otherPosition + ) { + current.combine(groupId, timestamps, values, reset, otherPosition); + } + + public static void combineStates( + $Type$RateGroupingState current, + int currentGroupId, // make the stylecheck happy + $Type$RateGroupingState state, + int statePosition + ) { + throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + } + + public static Block evaluateFinal($Type$RateGroupingState state, IntVector selected, DriverContext driverContext) { + return state.evaluateFinal(selected, driverContext.blockFactory()); + } + + private static class $Type$RateState implements Accountable { + static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject($Type$RateState.class); + final long[] timestamps; // descending order + final $type$[] values; + double reset = 0; + + $Type$RateState(int initialSize) { + this.timestamps = new long[initialSize]; + this.values = new $type$[initialSize]; + } + + $Type$RateState(long[] ts, $type$[] vs) { + this.timestamps = ts; + this.values = vs; + } + + private $type$ dv($type$ v0, $type$ v1) { + // counter reset detection + return v0 > v1 ? 
v1 : v1 - v0; + } + + void append(long t, $type$ v) { + assert timestamps.length == 2 : "expected two timestamps; got " + timestamps.length; + assert t < timestamps[1] : "@timestamp goes backward: " + t + " >= " + timestamps[1]; + reset += dv(v, values[1]) + dv(values[1], values[0]) - dv(v, values[0]); + timestamps[1] = t; + values[1] = v; + } + + int entries() { + return timestamps.length; + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_USAGE; + } + } + + public static final class $Type$RateGroupingState implements Releasable, Accountable, GroupingAggregatorState { + private ObjectArray<$Type$RateState> states; + private final long unitInMillis; + private final BigArrays bigArrays; + + $Type$RateGroupingState(BigArrays bigArrays, long unitInMillis) { + this.bigArrays = bigArrays; + this.states = bigArrays.newObjectArray(1); + this.unitInMillis = unitInMillis; + } + + void ensureCapacity(int groupId) { + states = bigArrays.grow(states, groupId + 1); + } + + void append(int groupId, long timestamp, $type$ value) { + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new $Type$RateState(new long[] { timestamp }, new $type$[] { value }); + states.set(groupId, state); + } else { + if (state.entries() == 1) { + state = new $Type$RateState(new long[] { state.timestamps[0], timestamp }, new $type$[] { state.values[0], value }); + states.set(groupId, state); + } else { + state.append(timestamp, value); + } + } + } + + void combine(int groupId, LongBlock timestamps, $Type$Block values, double reset, int otherPosition) { + final int valueCount = timestamps.getValueCount(otherPosition); + if (valueCount == 0) { + return; + } + final int firstIndex = timestamps.getFirstValueIndex(otherPosition); + ensureCapacity(groupId); + var state = states.get(groupId); + if (state == null) { + state = new $Type$RateState(valueCount); + states.set(groupId, state); + // TODO: add bulk_copy to Block + for (int i = 0; i < valueCount; 
i++) { + state.timestamps[i] = timestamps.getLong(firstIndex + i); + state.values[i] = values.get$Type$(firstIndex + i); + } + } else { + var newState = new $Type$RateState(state.entries() + valueCount); + states.set(groupId, newState); + merge(state, newState, firstIndex, valueCount, timestamps, values); + } + state.reset += reset; + } + + void merge($Type$RateState curr, $Type$RateState dst, int firstIndex, int rightCount, LongBlock timestamps, $Type$Block values) { + int i = 0, j = 0, k = 0; + final int leftCount = curr.entries(); + while (i < leftCount && j < rightCount) { + final var t1 = curr.timestamps[i]; + final var t2 = timestamps.getLong(firstIndex + j); + if (t1 > t2) { + dst.timestamps[k] = t1; + dst.values[k] = curr.values[i]; + ++i; + } else { + dst.timestamps[k] = t2; + dst.values[k] = values.get$Type$(firstIndex + j); + ++j; + } + ++k; + } + if (i < leftCount) { + System.arraycopy(curr.timestamps, i, dst.timestamps, k, leftCount - i); + System.arraycopy(curr.values, i, dst.values, k, leftCount - i); + } + while (j < rightCount) { + dst.timestamps[k] = timestamps.getLong(firstIndex + j); + dst.values[k] = values.get$Type$(firstIndex + j); + ++k; + ++j; + } + } + + @Override + public long ramBytesUsed() { + return states.ramBytesUsed(); + } + + @Override + public void close() { + Releasables.close(states); + } + + @Override + public void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { + assert blocks.length >= offset + 3 : "blocks=" + blocks.length + ",offset=" + offset; + final BlockFactory blockFactory = driverContext.blockFactory(); + final int positionCount = selected.getPositionCount(); + try ( + LongBlock.Builder timestamps = blockFactory.newLongBlockBuilder(positionCount * 2); + $Type$Block.Builder values = blockFactory.new$Type$BlockBuilder(positionCount * 2); + DoubleVector.Builder resets = blockFactory.newDoubleVectorFixedBuilder(positionCount) + ) { + for (int i = 0; i < positionCount; i++) { + 
final var groupId = selected.getInt(i); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state != null) { + timestamps.beginPositionEntry(); + for (long t : state.timestamps) { + timestamps.appendLong(t); + } + timestamps.endPositionEntry(); + + values.beginPositionEntry(); + for ($type$ v : state.values) { + values.append$Type$(v); + } + values.endPositionEntry(); + + resets.appendDouble(state.reset); + } else { + timestamps.appendNull(); + values.appendNull(); + resets.appendDouble(0); + } + } + blocks[offset] = timestamps.build(); + blocks[offset + 1] = values.build(); + blocks[offset + 2] = resets.build().asBlock(); + } + } + + Block evaluateFinal(IntVector selected, BlockFactory blockFactory) { + int positionCount = selected.getPositionCount(); + try (DoubleBlock.Builder rates = blockFactory.newDoubleBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + final var groupId = selected.getInt(p); + final var state = groupId < states.size() ? states.get(groupId) : null; + if (state == null) { + rates.appendNull(); + continue; + } + int len = state.entries(); + long dt = state.timestamps[0] - state.timestamps[len - 1]; + if (dt == 0) { + // TODO: maybe issue warning when we don't have enough sample? 
+ rates.appendNull(); + } else { + double reset = state.reset; + for (int i = 1; i < len; i++) { + if (state.values[i - 1] < state.values[i]) { + reset += state.values[i]; + } + } + double dv = state.values[0] - state.values[len - 1] + reset; + rates.appendDouble(dv * unitInMillis / dt); + } + } + return rates.build(); + } + } + + void enableGroupIdTracking(SeenGroupIds seenGroupIds) { + // noop - we handle the null states inside `toIntermediate` and `evaluateFinal` + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 9a5150bdf4fff..7bd3c426ae1c3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -26,8 +26,13 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongVector; @@ -35,11 +40,13 @@ import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.HashAggregationOperator; 
import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -57,6 +64,7 @@ import java.util.Map; import java.util.function.Function; +import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -198,6 +206,135 @@ record Doc(int host, long timestamp, long metric) {} assertThat(offset, equalTo(Math.min(limit, numDocs))); } + public void testBasicRate() { + long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; + long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; + + long[] v2 = { 7, 2, 0, 11, 24, 0, 4, 1, 10, 2 }; + long[] t2 = { 1, 2, 4, 5, 6, 8, 10, 11, 12, 14 }; + + long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; + long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; + List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); + long unit = between(1, 5); + Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit)); + assertThat(actualRates, equalTo(Map.of("p1", 35.0 * unit / 111.0, "p2", 42.0 * unit / 13.0, "p3", 10.0 * unit / 20.0))); + } + + public void testRandomRate() { + int numPods = between(1, 10); + List pods = new ArrayList<>(); + Map expectedRates = new HashMap<>(); + TimeValue unit = TimeValue.timeValueSeconds(1); + for (int p = 0; p < numPods; p++) { + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long t = 
DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + t += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = t; + } + Pod pod = new Pod("p" + p, times, values); + pods.add(pod); + if (numValues == 1) { + expectedRates.put(pod.name, null); + } else { + expectedRates.put(pod.name, pod.expectedRate(unit)); + } + } + Map actualRates = runRateTest(pods, unit); + assertThat(actualRates, equalTo(expectedRates)); + } + + record Pod(String name, long[] times, long[] values) { + Pod { + assert times.length == values.length : times.length + "!=" + values.length; + } + + double expectedRate(TimeValue unit) { + double dv = 0; + for (int i = 0; i < values.length - 1; i++) { + if (values[i + 1] < values[i]) { + dv += values[i]; + } + } + dv += (values[values.length - 1] - values[0]); + long dt = times[times.length - 1] - times[0]; + return (dv * unit.millis()) / dt; + } + } + + Map runRateTest(List pods, TimeValue unit) { + long unitInMillis = unit.millis(); + record Doc(String pod, long timestamp, long requests) { + + } + var sourceOperatorFactory = createTimeSeriesSourceOperator(Integer.MAX_VALUE, between(1, 100), randomBoolean(), writer -> { + List docs = new ArrayList<>(); + for (Pod pod : pods) { + for (int i = 0; i < pod.times.length; i++) { + docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + } + } + Randomness.shuffle(docs); + for (Doc doc : docs) { + writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + } + return docs.size(); + }); + var ctx = driverContext(); + HashAggregationOperator initialHash = new HashAggregationOperator( + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL)), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(3, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 
1000), + randomBoolean() + ), + ctx + ); + + HashAggregationOperator finalHash = new HashAggregationOperator( + List.of(new RateLongAggregatorFunctionSupplier(List.of(1, 2, 3), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL)), + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(0, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + List results = new ArrayList<>(); + var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + var podField = new KeywordFieldMapper.KeywordFieldType("pod"); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + initialHash, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + Map rates = new HashMap<>(); + for (Page result : results) { + BytesRefBlock keysBlock = result.getBlock(0); + DoubleBlock ratesBlock = result.getBlock(1); + for (int i = 0; i < result.getPositionCount(); i++) { + rates.put(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), ratesBlock.getDouble(i)); + } + result.releaseBlocks(); + } + return rates; + } + @Override protected Operator.OperatorFactory simple() { return createTimeSeriesSourceOperator(1, 1, false, writer -> { From c1467e02c874b2c7128c0c0dafb57cd44dc01608 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 25 Mar 2024 13:02:36 -0700 Subject: [PATCH 163/214] Reload Lucene in time series source when thread changes (#106727) The timeseries source operator should reload Lucene structures, such as doc values and scorers, when the executing thread changes. 
--- ...TimeSeriesSortedSourceOperatorFactory.java | 35 +++++++++++++++---- .../TimeSeriesSortedSourceOperatorTests.java | 2 +- 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index f9df90da6aa2d..ad884538ac85f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -228,6 +228,9 @@ protected boolean lessThan(Leaf a, Leaf b) { void consume() throws IOException { if (queue != null) { currentTsid = BytesRef.deepCopyOf(queue.top().timeSeriesHash); + if (queue.size() > 0) { + queue.top().reinitializeIfNeeded(Thread.currentThread()); + } while (queue.size() > 0) { if (remainingDocs <= 0 || currentPagePos >= maxPageSize) { break; @@ -249,12 +252,14 @@ void consume() throws IOException { newTop = queue.size() > 0 ? queue.top() : null; } if (newTop != null && newTop.timeSeriesHash.equals(currentTsid) == false) { + newTop.reinitializeIfNeeded(Thread.currentThread()); globalTsidOrd++; currentTsid = BytesRef.deepCopyOf(newTop.timeSeriesHash); } } } else { // Only one segment, so no need to use priority queue and use segment ordinals as tsid ord. 
+ leaf.reinitializeIfNeeded(Thread.currentThread()); while (leaf.nextDoc()) { tsOrdBuilder.appendInt(leaf.timeSeriesHashOrd); timestampIntervalBuilder.appendLong(leaf.timestamp); @@ -280,37 +285,55 @@ boolean completed() { static class Leaf { private final int segmentOrd; - private final SortedDocValues tsids; - private final SortedNumericDocValues timestamps; - private final DocIdSetIterator iterator; + private final Weight weight; + private final LeafReaderContext leaf; + private SortedDocValues tsids; + private SortedNumericDocValues timestamps; + private DocIdSetIterator iterator; + private Thread createdThread; private long timestamp; private int timeSeriesHashOrd; private BytesRef timeSeriesHash; + private int docID = -1; Leaf(Weight weight, LeafReaderContext leaf) throws IOException { this.segmentOrd = leaf.ord; + this.weight = weight; + this.leaf = leaf; + this.createdThread = Thread.currentThread(); tsids = leaf.reader().getSortedDocValues("_tsid"); timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); iterator = weight.scorer(leaf).iterator(); } boolean nextDoc() throws IOException { - int docID = iterator.nextDoc(); + docID = iterator.nextDoc(); if (docID == DocIdSetIterator.NO_MORE_DOCS) { return false; } - boolean advanced = tsids.advanceExact(iterator.docID()); + boolean advanced = tsids.advanceExact(docID); assert advanced; timeSeriesHashOrd = tsids.ordValue(); timeSeriesHash = tsids.lookupOrd(timeSeriesHashOrd); - advanced = timestamps.advanceExact(iterator.docID()); + advanced = timestamps.advanceExact(docID); assert advanced; timestamp = timestamps.nextValue(); return true; } + void reinitializeIfNeeded(Thread executingThread) throws IOException { + if (executingThread != createdThread) { + tsids = leaf.reader().getSortedDocValues("_tsid"); + timestamps = leaf.reader().getSortedNumericDocValues("@timestamp"); + iterator = weight.scorer(leaf).iterator(); + if (docID != -1) { + iterator.advance(docID); + } + createdThread = 
executingThread; + } + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 7bd3c426ae1c3..16340909a4fd3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -144,7 +144,7 @@ public void testLimit() { public void testRandom() { record Doc(int host, long timestamp, long metric) {} - int numDocs = between(1, 1000); + int numDocs = between(1, 5000); List docs = new ArrayList<>(); Map timestamps = new HashMap<>(); long t0 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); From 61982c461f995416d004fc482ac843322db45adc Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: Mon, 25 Mar 2024 10:26:00 -1000 Subject: [PATCH 164/214] Improve error message when rolling over DS alias (#106708) Currently a null pointer exception is thrown when trying to execute a rollover on a data stream alias. This commit checks before trying to execute if we're attempting to rollover a data stream alias or not. 
Fixes #106137 --------- Co-authored-by: James Baiera --- docs/changelog/106708.yaml | 6 +++ .../rollover/TransportRolloverAction.java | 10 +++++ .../TransportRolloverActionTests.java | 41 +++++++++++++++++++ 3 files changed, 57 insertions(+) create mode 100644 docs/changelog/106708.yaml diff --git a/docs/changelog/106708.yaml b/docs/changelog/106708.yaml new file mode 100644 index 0000000000000..b8fdd37e5f03f --- /dev/null +++ b/docs/changelog/106708.yaml @@ -0,0 +1,6 @@ +pr: 106708 +summary: Improve error message when rolling over DS alias +area: Data streams +type: bug +issues: + - 106137 diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index c295ccde01623..774bfae53fb94 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -215,6 +215,16 @@ protected void masterOperation( } } + final IndexAbstraction rolloverTargetAbstraction = clusterState.metadata() + .getIndicesLookup() + .get(rolloverRequest.getRolloverTarget()); + if (rolloverTargetAbstraction.getType() == IndexAbstraction.Type.ALIAS && rolloverTargetAbstraction.isDataStreamRelated()) { + listener.onFailure( + new IllegalStateException("Aliases to data streams cannot be rolled over. 
Please rollover the data stream itself.") + ); + return; + } + IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getRolloverTarget()) .clear() .indicesOptions(IndicesOptions.fromOptions(true, false, true, true)) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 814cff37e0708..db156f983220e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -552,6 +552,47 @@ public void testLazyRolloverFails() throws Exception { } } + public void testRolloverAliasToDataStreamFails() throws Exception { + final IndexMetadata backingIndexMetadata = IndexMetadata.builder(".ds-logs-ds-000001") + .settings(settings(IndexVersion.current())) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + final DataStream dataStream = new DataStream( + "logs-ds", + List.of(backingIndexMetadata.getIndex()), + 1, + Map.of(), + false, + false, + false, + false, + IndexMode.STANDARD + ); + Metadata.Builder metadataBuilder = Metadata.builder().put(backingIndexMetadata, false).put(dataStream); + metadataBuilder.put("ds-alias", dataStream.getName(), true, null); + final ClusterState stateBefore = ClusterState.builder(ClusterName.DEFAULT).metadata(metadataBuilder).build(); + + final TransportRolloverAction transportRolloverAction = new TransportRolloverAction( + mock(TransportService.class), + mockClusterService, + mockThreadPool, + mockActionFilters, + mockIndexNameExpressionResolver, + rolloverService, + mockClient, + mockAllocationService, + mockMetadataDataStreamService, + dataStreamAutoShardingService + ); + + final PlainActionFuture future = new PlainActionFuture<>(); + RolloverRequest rolloverRequest = new 
RolloverRequest("ds-alias", null); + transportRolloverAction.masterOperation(mock(CancellableTask.class), rolloverRequest, stateBefore, future); + IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, future::actionGet); + assertThat(illegalStateException.getMessage(), containsString("Aliases to data streams cannot be rolled over.")); + } + private IndicesStatsResponse createIndicesStatResponse(String indexName, long totalDocs, long primariesDocs) { final CommonStats primaryStats = mock(CommonStats.class); when(primaryStats.getDocs()).thenReturn(new DocsStats(primariesDocs, 0, between(1, 10000))); From dc40eef38b0eee58ae1ea9572532fbd136413308 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 25 Mar 2024 22:16:00 +0100 Subject: [PATCH 165/214] [Inference API] OpenAI Completions API (#106476) --- .../org/elasticsearch/TransportVersions.java | 1 + .../org/elasticsearch/inference/TaskType.java | 1 + .../results/ChatCompletionResults.java | 124 ++++++++ .../action/openai/OpenAiActionCreator.java | 8 + .../action/openai/OpenAiActionVisitor.java | 3 + .../openai/OpenAiChatCompletionAction.java | 60 ++++ ...nAiCompletionExecutableRequestCreator.java | 62 ++++ .../OpenAiChatCompletionResponseHandler.java | 25 ++ .../openai/OpenAiResponseHandler.java | 7 +- .../openai/OpenAiChatCompletionRequest.java | 96 ++++++ .../OpenAiChatCompletionRequestEntity.java | 72 +++++ .../openai/OpenAiEmbeddingsRequest.java | 2 +- .../request/openai/OpenAiRequest.java | 12 + .../external/request/openai/OpenAiUtils.java | 4 + .../OpenAiChatCompletionResponseEntity.java | 102 ++++++ .../services/openai/OpenAiService.java | 11 +- .../services/openai/OpenAiServiceFields.java | 14 + .../completion/OpenAiChatCompletionModel.java | 84 +++++ ...enAiChatCompletionRequestTaskSettings.java | 52 +++ .../OpenAiChatCompletionServiceSettings.java | 176 +++++++++++ .../OpenAiChatCompletionTaskSettings.java | 106 +++++++ .../openai/OpenAiActionCreatorTests.java | 264 
++++++++++++++++ .../OpenAiChatCompletionActionTests.java | 297 ++++++++++++++++++ ...nAiChatCompletionResponseHandlerTests.java | 68 ++++ ...penAiChatCompletionRequestEntityTests.java | 53 ++++ .../OpenAiChatCompletionRequestTests.java | 132 ++++++++ ...enAiChatCompletionResponseEntityTests.java | 215 +++++++++++++ .../results/ChatCompletionResultsTests.java | 117 +++++++ .../services/openai/OpenAiServiceTests.java | 63 ++++ .../OpenAiChatCompletionModelTests.java | 66 ++++ ...hatCompletionRequestTaskSettingsTests.java | 47 +++ ...nAiChatCompletionServiceSettingsTests.java | 193 ++++++++++++ ...OpenAiChatCompletionTaskSettingsTests.java | 86 +++++ 33 files changed, 2620 insertions(+), 3 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index b5070c5cbd065..c23d961119a74 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -151,6 +151,7 @@ static TransportVersion def(int id) { public static final TransportVersion AUTO_SHARDING_ROLLOVER_CONDITION = def(8_611_00_0); public static final TransportVersion KNN_QUERY_VECTOR_BUILDER = def(8_612_00_0); public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0); + public static final TransportVersion ML_COMPLETION_INFERENCE_SERVICE_ADDED = def(8_614_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 5afedee873145..1e301ad796e90 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -21,6 +21,7 @@ public enum TaskType implements Writeable { TEXT_EMBEDDING, SPARSE_EMBEDDING, + COMPLETION, ANY { @Override public boolean isAnyOrSame(TaskType other) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java new file mode 100644 index 0000000000000..50ca46d85190f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChatCompletionResults.java @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Writes a chat completion result in the following json format: + * { + * "completion": [ + * { + * "result": "some result 1" + * }, + * { + * "result": "some result 2" + * } + * ] + * } + * + */ +public record ChatCompletionResults(List results) implements InferenceServiceResults { + + public static final String NAME = "chat_completion_service_results"; + public static final String COMPLETION = TaskType.COMPLETION.name().toLowerCase(Locale.ROOT); + + public ChatCompletionResults(StreamInput in) throws IOException { + this(in.readCollectionAsList(Result::new)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(COMPLETION); + for (Result result : results) { + result.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(results); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public List transformToCoordinationFormat() { + throw new UnsupportedOperationException(); + } + + @Override + public List transformToLegacyFormat() { + throw new UnsupportedOperationException(); + } + + public List getResults() { + return 
results; + } + + @Override + public Map asMap() { + Map map = new LinkedHashMap<>(); + map.put(COMPLETION, results.stream().map(Result::asMap).collect(Collectors.toList())); + + return map; + } + + public record Result(String content) implements Writeable, ToXContentObject { + + public static final String RESULT = "result"; + + public Result(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(content); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(RESULT, content); + builder.endObject(); + + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + public Map asMap() { + return Map.of(RESULT, content); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java index 94583c634fb26..dc89240862e6a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.Map; @@ -33,4 +34,11 @@ public ExecutableAction create(OpenAiEmbeddingsModel model, Map return new 
OpenAiEmbeddingsAction(sender, overriddenModel, serviceComponents); } + + @Override + public ExecutableAction create(OpenAiChatCompletionModel model, Map taskSettings) { + var overriddenModel = OpenAiChatCompletionModel.of(model, taskSettings); + + return new OpenAiChatCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java index 52d9f2e2132a7..0f26e054d734b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionVisitor.java @@ -8,10 +8,13 @@ package org.elasticsearch.xpack.inference.external.action.openai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.Map; public interface OpenAiActionVisitor { ExecutableAction create(OpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(OpenAiChatCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java new file mode 100644 index 0000000000000..31fd6a35ef26b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.openai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionExecutableRequestCreator; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class OpenAiChatCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final OpenAiCompletionExecutableRequestCreator requestCreator; + + private final Sender sender; + + public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new OpenAiCompletionExecutableRequestCreator(model); + this.errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), 
"OpenAI chat completions"); + } + + @Override + public void execute(List input, ActionListener listener) { + if (input.size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, input, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java new file mode 100644 index 0000000000000..44ab670843335 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionExecutableRequestCreator.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.external.openai.OpenAiChatCompletionResponseHandler; +import org.elasticsearch.xpack.inference.external.request.openai.OpenAiChatCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.openai.OpenAiChatCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class OpenAiCompletionExecutableRequestCreator implements ExecutableRequestCreator { + + private static final Logger logger = LogManager.getLogger(OpenAiCompletionExecutableRequestCreator.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final OpenAiChatCompletionModel model; + + private final OpenAiAccount account; + + public OpenAiCompletionExecutableRequestCreator(OpenAiChatCompletionModel model) { + this.model = Objects.requireNonNull(model); + this.account = new OpenAiAccount( + this.model.getServiceSettings().uri(), + this.model.getServiceSettings().organizationId(), + this.model.getSecretSettings().apiKey() + ); + } + + @Override + public Runnable create( + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + OpenAiChatCompletionRequest request = new OpenAiChatCompletionRequest(account, input, 
model); + + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + + private static ResponseHandler createCompletionHandler() { + return new OpenAiChatCompletionResponseHandler("openai completion", OpenAiChatCompletionResponseEntity::fromResponse); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java new file mode 100644 index 0000000000000..5924356e610a3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandler.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.Request; + +public class OpenAiChatCompletionResponseHandler extends OpenAiResponseHandler { + public OpenAiChatCompletionResponseHandler(String requestType, ResponseParser parseFunction) { + super(requestType, parseFunction); + } + + @Override + RetryException buildExceptionHandling429(Request request, HttpResult result) { + // We don't retry, if the chat completion input is too large + return new RetryException(false, buildError(RATE_LIMIT, request, result)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 10083d3fd4667..db7ca8d6bdc63 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -35,6 +35,7 @@ public class OpenAiResponseHandler extends BaseResponseHandler { static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; static final String CONTENT_TOO_LARGE_MESSAGE = "Please reduce your prompt; or completion length."; + static final String OPENAI_SERVER_BUSY = "Received a server busy error status code"; public OpenAiResponseHandler(String requestType, ResponseParser parseFunction) { @@ -70,7 +71,7 @@ void checkForFailureStatusCode(Request request, HttpResult result) throws RetryE } else if (statusCode > 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if 
(statusCode == 429) { - throw new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); + throw buildExceptionHandling429(request, result); } else if (isContentTooLarge(result)) { throw new ContentTooLargeException(buildError(CONTENT_TOO_LARGE, request, result)); } else if (statusCode == 401) { @@ -82,6 +83,10 @@ void checkForFailureStatusCode(Request request, HttpResult result) throws RetryE } } + RetryException buildExceptionHandling429(Request request, HttpResult result) { + return new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); + } + private static boolean isContentTooLarge(HttpResult result) { int statusCode = result.response().getStatusLine().getStatusCode(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java new file mode 100644 index 0000000000000..e53d4e7362735 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequest.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; + +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.buildUri; +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; + +public class OpenAiChatCompletionRequest implements OpenAiRequest { + + private final OpenAiAccount account; + private final List input; + private final URI uri; + private final OpenAiChatCompletionModel model; + + public OpenAiChatCompletionRequest(OpenAiAccount account, List input, OpenAiChatCompletionModel model) { + this.account = Objects.requireNonNull(account); + this.input = Objects.requireNonNull(input); + this.uri = buildUri(this.account.url(), "OpenAI", OpenAiChatCompletionRequest::buildDefaultUri); + this.model = Objects.requireNonNull(model); + } + + @Override + public HttpRequest createHttpRequest() { + HttpPost httpPost = new HttpPost(uri); + + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString( + new OpenAiChatCompletionRequestEntity(input, model.getServiceSettings().modelId(), model.getTaskSettings().user()) + 
).getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); + + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + + var org = account.organizationId(); + if (org != null) { + httpPost.setHeader(createOrgHeader(org)); + } + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + // No truncation for OpenAI chat completions + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for OpenAI chat completions + return null; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + // default for testing + static URI buildDefaultUri() throws URISyntaxException { + return new URIBuilder().setScheme("https") + .setHost(OpenAiUtils.HOST) + .setPathSegments(OpenAiUtils.VERSION_1, OpenAiUtils.CHAT_PATH, OpenAiUtils.COMPLETIONS_PATH) + .build(); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java new file mode 100644 index 0000000000000..c9aa225c77941 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntity.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public class OpenAiChatCompletionRequestEntity implements ToXContentObject { + + private static final String MESSAGES_FIELD = "messages"; + private static final String MODEL_FIELD = "model"; + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String ROLE_FIELD = "role"; + private static final String USER_FIELD = "user"; + private static final String CONTENT_FIELD = "content"; + + private final List messages; + private final String model; + + private final String user; + + public OpenAiChatCompletionRequestEntity(List messages, String model, String user) { + Objects.requireNonNull(messages); + Objects.requireNonNull(model); + + this.messages = messages; + this.model = model; + this.user = user; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + builder.endArray(); + + builder.field(MODEL_FIELD, model); + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index 9893b556e1a47..df5d3024fd483 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -28,7 +28,7 @@ import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.createOrgHeader; -public class OpenAiEmbeddingsRequest implements Request { +public class OpenAiEmbeddingsRequest implements OpenAiRequest { private final Truncator truncator; private final OpenAiAccount account; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java new file mode 100644 index 0000000000000..7a630108cfcdf --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiRequest.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.xpack.inference.external.request.Request; + +public interface OpenAiRequest extends Request {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java index a6479b3ecde25..1199f8a4f0230 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUtils.java @@ -14,6 +14,10 @@ public class OpenAiUtils { public static final String HOST = "api.openai.com"; public static final String VERSION_1 = "v1"; public static final String EMBEDDINGS_PATH = "embeddings"; + + public static final String CHAT_PATH = "chat"; + + public static final String COMPLETIONS_PATH = "completions"; public static final String ORGANIZATION_HEADER = "OpenAI-Organization"; public static Header createOrgHeader(String org) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java new file mode 100644 index 0000000000000..daf4e6578240e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntity.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.openai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class OpenAiChatCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in OpenAI chat completions response"; + + /** + * Parses the OpenAI chat completion response. + * For a request like: + * + *
      +     *     
      +     *         {
      +     *             "inputs": ["Please summarize this text: some text", "Answer the following question: Question"]
      +     *         }
      +     *     
      +     * 
      + * + * The response would look like: + * + *
      +     *     
      +     *         {
      +     *              "id": "chatcmpl-123",
      +     *              "object": "chat.completion",
      +     *              "created": 1677652288,
      +     *              "model": "gpt-3.5-turbo-0613",
      +     *              "system_fingerprint": "fp_44709d6fcb",
      +     *              "choices": [
      +     *                  {
      +     *                      "index": 0,
      +     *                      "message": {
      +     *                          "role": "assistant",
      +     *                          "content": "\n\nHello there, how may I assist you today?",
      +    *                          },
      +     *                      "logprobs": null,
      +     *                      "finish_reason": "stop"
      +     *                  }
      +     *              ],
      +     *              "usage": {
      +     *                "prompt_tokens": 9,
      +     *                "completion_tokens": 12,
      +     *                "total_tokens": 21
      +     *              }
      +     *          }
      +     *     
      +     * 
      + */ + + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 130928b17ff8d..83e5eef45fda4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import 
org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettings; @@ -127,6 +128,14 @@ private static OpenAiModel createModel( secretSettings, context ); + case COMPLETION -> new OpenAiChatCompletionModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @@ -254,7 +263,7 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_12_0; + return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java new file mode 100644 index 0000000000000..1e2353f901705 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai; + +public class OpenAiServiceFields { + + public static final String USER = "user"; + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java new file mode 100644 index 0000000000000..467c4f44f34fe --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModel.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionVisitor; +import org.elasticsearch.xpack.inference.services.openai.OpenAiModel; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +public class OpenAiChatCompletionModel extends OpenAiModel { + + public static OpenAiChatCompletionModel of(OpenAiChatCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap(taskSettings); + return new OpenAiChatCompletionModel(model, OpenAiChatCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + 
public OpenAiChatCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map serviceSettings, + Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + OpenAiChatCompletionServiceSettings.fromMap(serviceSettings), + OpenAiChatCompletionTaskSettings.fromMap(taskSettings), + DefaultSecretSettings.fromMap(secrets) + ); + } + + OpenAiChatCompletionModel( + String modelId, + TaskType taskType, + String service, + OpenAiChatCompletionServiceSettings serviceSettings, + OpenAiChatCompletionTaskSettings taskSettings, + @Nullable DefaultSecretSettings secrets + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); + } + + private OpenAiChatCompletionModel(OpenAiChatCompletionModel originalModel, OpenAiChatCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public OpenAiChatCompletionServiceSettings getServiceSettings() { + return (OpenAiChatCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public OpenAiChatCompletionTaskSettings getTaskSettings() { + return (OpenAiChatCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(OpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java new file mode 100644 index 0000000000000..8029d8579baba --- /dev/null +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettings.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; + +/** + * This class handles extracting OpenAI task settings from a request. The difference between this class and + * {@link OpenAiChatCompletionTaskSettings} is that this class considers all fields as optional. It will not throw an error if a field + * is missing. This allows overriding persistent task settings. + * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse + */ +public record OpenAiChatCompletionRequestTaskSettings(@Nullable String user) { + + public static final OpenAiChatCompletionRequestTaskSettings EMPTY_SETTINGS = new OpenAiChatCompletionRequestTaskSettings(null); + + /** + * Extracts the task settings from a map. All settings are considered optional and the absence of a setting + * does not throw an error. 
+ * + * @param map the settings received from a request + * @return a {@link OpenAiChatCompletionRequestTaskSettings} + */ + public static OpenAiChatCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return OpenAiChatCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionRequestTaskSettings(user); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java new file mode 100644 index 0000000000000..0150d75b7037e --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -0,0 +1,176 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createOptionalUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; + +/** + * Defines the service settings for interacting with OpenAI's chat completion models. 
+ */ +public class OpenAiChatCompletionServiceSettings implements ServiceSettings { + + public static final String NAME = "openai_completion_service_settings"; + + static final String ORGANIZATION = "organization_id"; + + public static OpenAiChatCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String modelId = extractRequiredString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String organizationId = extractOptionalString(map, ORGANIZATION, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); + + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionServiceSettings(modelId, uri, organizationId, maxInputTokens); + } + + private final String modelId; + + private final URI uri; + + private final String organizationId; + + private final Integer maxInputTokens; + + public OpenAiChatCompletionServiceSettings( + String modelId, + @Nullable URI uri, + @Nullable String organizationId, + @Nullable Integer maxInputTokens + ) { + this.modelId = modelId; + this.uri = uri; + this.organizationId = organizationId; + this.maxInputTokens = maxInputTokens; + } + + OpenAiChatCompletionServiceSettings( + String modelId, + @Nullable String uri, + @Nullable String organizationId, + @Nullable Integer maxInputTokens + ) { + this(modelId, createOptionalUri(uri), organizationId, maxInputTokens); + } + + public OpenAiChatCompletionServiceSettings(StreamInput in) throws IOException { + this.modelId = in.readString(); + this.uri = createOptionalUri(in.readOptionalString()); + this.organizationId = in.readOptionalString(); + 
this.maxInputTokens = in.readOptionalVInt(); + } + + public String modelId() { + return modelId; + } + + public URI uri() { + return uri; + } + + public String organizationId() { + return organizationId; + } + + public Integer maxInputTokens() { + return maxInputTokens; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + { + builder.field(MODEL_ID, modelId); + + if (uri != null) { + builder.field(URL, uri.toString()); + } + + if (organizationId != null) { + builder.field(ORGANIZATION, organizationId); + } + + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } + } + + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(modelId); + out.writeOptionalString(uri != null ? 
uri.toString() : null); + out.writeOptionalString(organizationId); + out.writeOptionalVInt(maxInputTokens); + } + + @Override + public ToXContentObject getFilteredXContentObject() { + return this; + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + OpenAiChatCompletionServiceSettings that = (OpenAiChatCompletionServiceSettings) object; + return Objects.equals(modelId, that.modelId) + && Objects.equals(uri, that.uri) + && Objects.equals(organizationId, that.organizationId) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(modelId, uri, organizationId, maxInputTokens); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java new file mode 100644 index 0000000000000..fb10d959087de --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; + +public class OpenAiChatCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "openai_completion_task_settings"; + + public static final String USER = "user"; + + public static OpenAiChatCompletionTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new OpenAiChatCompletionTaskSettings(user); + } + + private final String user; + + public OpenAiChatCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public OpenAiChatCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + public static OpenAiChatCompletionTaskSettings of( + OpenAiChatCompletionTaskSettings originalSettings, + OpenAiChatCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new OpenAiChatCompletionTaskSettings(userToUse); + } + + public String user() { + return user; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (user != null) { + builder.field(USER, user); + } + + builder.endObject(); + + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + OpenAiChatCompletionTaskSettings that = (OpenAiChatCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index a844061fa48e1..9b14cf259522c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -27,17 +27,21 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; 
+import static org.elasticsearch.xpack.inference.external.action.openai.OpenAiChatCompletionActionTests.buildExpectedChatCompletionResultMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; import static org.hamcrest.Matchers.equalTo; @@ -283,6 +287,266 @@ public void testCreate_OpenAiEmbeddingsModel_FailsFromInvalidResponseFormat() th } } + public void testCreate_OpenAiChatCompletionModel() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + 
"created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap(null); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = 
senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("Hello there, how may I assist you today?")))); + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertNull(request.getHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + 
public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var settings = buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ); + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, settings); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "data_does_not_exist": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in OpenAI chat completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + 
assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertNull(webServer.requests().get(0).getHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + assertThat(requestMap.get("n"), is(1)); + } + } + public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusCode() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java new file mode 100644 index 0000000000000..15998469d08d0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -0,0 +1,297 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.openai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockRequest; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.hamcrest.CoreMatchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; 
+import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class OpenAiChatCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = new HttpRequestSender.Factory(createWithEmptySettings(threadPool), clientManager, mockClusterServiceEmpty()); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0125", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "result content" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + 
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectedChatCompletionResultMap(List.of("result content")))); + assertThat(webServer.requests(), hasSize(1)); + + MockRequest request = webServer.requests().get(0); + + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(request.getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(request.getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { + try (var sender = mock(Sender.class)) { + var thrownException = expectThrows( + IllegalArgumentException.class, + () -> createAction("^^", "org", "secret", "model", "user", sender) + ); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> 
listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled_WhenUrlIsNull() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any(), any()); + + var action = createAction(null, "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Failed to send OpenAI chat completions request")); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + 
PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send OpenAI chat completions request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsExceptionWithNullUrl() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any()); + + var action = createAction(null, "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("Failed to send OpenAI chat completions request")); + } + + public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo-0613", + "system_fingerprint": "fp_44709d6fcb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Hello there, how may I assist you today?" 
+ }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var action = createAction(getUrl(webServer), "org", "secret", "model", "user", sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc", "def"), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), CoreMatchers.is("OpenAI completions only accepts 1 input")); + assertThat(thrownException.status(), CoreMatchers.is(RestStatus.BAD_REQUEST)); + } + } + + public static Map buildExpectedChatCompletionResultMap(List results) { + return Map.of( + ChatCompletionResults.COMPLETION, + results.stream().map(result -> Map.of(ChatCompletionResults.Result.RESULT, result)).toList() + ); + } + + private OpenAiChatCompletionAction createAction( + String url, + String org, + String apiKey, + String modelName, + @Nullable String user, + Sender sender + ) { + var model = createChatCompletionModel(url, org, apiKey, modelName, user); + + return new OpenAiChatCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java new file mode 100644 index 0000000000000..5c3585b630073 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiChatCompletionResponseHandlerTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.Header; +import org.apache.http.HeaderElement; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.request.RequestTests; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiChatCompletionResponseHandlerTests extends ESTestCase { + + public void testHandle429InputAndOutputTokensTooLarge_ThrowWithoutRetrying() { + String responseBody = """ + { + "error": { + "message": "The input or output tokens must be reduced in order to run successfully", + "type": "content_too_large", + "param": null, + "code": null + } + } + """; + ByteArrayInputStream responseBodyStream = new ByteArrayInputStream(responseBody.getBytes(StandardCharsets.UTF_8)); + + var header = mock(Header.class); + when(header.getElements()).thenReturn(new HeaderElement[] {}); + + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(429); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getFirstHeader(anyString())).thenReturn(header); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var mockRequest = RequestTests.mockRequest("id"); + var httpResult = new HttpResult(httpResponse, responseBodyStream.readAllBytes()); + var handler = new OpenAiChatCompletionResponseHandler("", (request, result) -> null); + + var retryException = 
expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(mockRequest, httpResult)); + + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + is( + "Received a rate limit status code for request from inference entity id [id] status [429]. " + + "Error message: [The input or output tokens must be reduced in order to run successfully]" + ) + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java new file mode 100644 index 0000000000000..0b61bf060fc5f --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestEntityTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class OpenAiChatCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesUserWhenDefined() throws IOException { + var entity = new OpenAiChatCompletionRequestEntity(List.of("abc"), "model", "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"abc"}],"model":"model","n":1,"user":"user"}""")); + + } + + public void testXContent_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new OpenAiChatCompletionRequestEntity(List.of("abc"), "model", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"abc"}],"model":"model","n":1}""")); + } + + public void testXContent_ThrowsIfModelIsNull() { + assertThrows(NullPointerException.class, () -> new OpenAiChatCompletionRequestEntity(List.of("abc"), null, "user")); + } + + public void testXContent_ThrowsIfMessagesAreNull() { + assertThrows(NullPointerException.class, () -> new OpenAiChatCompletionRequestEntity(null, "model", "user")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java new file mode 100644 index 0000000000000..7858bdf4d1259 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiChatCompletionRequestTests.java @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.openai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiChatCompletionRequest.buildDefaultUri; +import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithUrlOrganizationUserDefined() throws IOException { + var request = createRequest("www.google.com", "org", "secret", "abc", "model", "user"); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), 
instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is("www.google.com")); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertThat(httpPost.getLastHeader(ORGANIZATION_HEADER).getValue(), is("org")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithDefaultUrl() throws URISyntaxException, IOException { + var request = createRequest(null, "org", "secret", "abc", "model", "user"); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is(buildDefaultUri().toString())); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertThat(httpPost.getLastHeader(ORGANIZATION_HEADER).getValue(), is("org")); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(4)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithDefaultUrlAndWithoutUserOrganization() throws URISyntaxException, IOException { + 
var request = createRequest(null, null, "secret", "abc", "model", null); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat(httpPost.getURI().toString(), is(OpenAiChatCompletionRequest.buildDefaultUri().toString())); + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer secret")); + assertNull(httpPost.getLastHeader(ORGANIZATION_HEADER)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(3)); + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abc")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testTruncate_DoesNotReduceInputTextSize() throws URISyntaxException, IOException { + var request = createRequest(null, null, "secret", "abcd", "model", null); + var truncatedRequest = request.truncate(); + assertThat(request.getURI().toString(), is(OpenAiChatCompletionRequest.buildDefaultUri().toString())); + + var httpRequest = truncatedRequest.createHttpRequest(); + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(3)); + + // We do not truncate for OpenAi chat completions + assertThat(requestMap.get("messages"), is(List.of(Map.of("role", "user", "content", "abcd")))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("n"), is(1)); + } + + public void testTruncationInfo_ReturnsNull() { + var request = createRequest(null, null, "secret", "abcd", "model", null); + assertNull(request.getTruncationInfo()); + } + + 
public static OpenAiChatCompletionRequest createRequest( + @Nullable String url, + @Nullable String org, + String apiKey, + String input, + String model, + @Nullable String user + ) { + var chatCompletionModel = OpenAiChatCompletionModelTests.createChatCompletionModel(url, org, apiKey, model, user); + + var account = new OpenAiAccount( + chatCompletionModel.getServiceSettings().uri(), + org, + chatCompletionModel.getSecretSettings().apiKey() + ); + return new OpenAiChatCompletionRequest(account, List.of(input), chatCompletionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java new file mode 100644 index 0000000000000..18f702014e2d8 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.openai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class OpenAiChatCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + ChatCompletionResults chatCompletionResults = OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "not_choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": 
null + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in OpenAI chat completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": { + "test": { + "index": 0, + "message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + }, + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "not_message": { + "role": "assistant", + "content": "some content" + }, + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in OpenAI chat completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "id": "some-id", + "object": "chat.completion", + "created": 1705397787, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": "some content", + "logprobs": null, + "finish_reason": "stop" + }, + ], + "usage": { + "prompt_tokens": 46, + "completion_tokens": 39, + "total_tokens": 85 + }, + "system_fingerprint": null + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> OpenAiChatCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java new file mode 100644 index 0000000000000..444f6792abe63 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChatCompletionResultsTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.results; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class ChatCompletionResultsTests extends AbstractWireSerializingTestCase { + + public void testToXContent_CreateTheRightFormatForASingleChatCompletionResult() { + String resultContent = "content"; + var result = new ChatCompletionResults(List.of(new ChatCompletionResults.Result(resultContent))); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, resultContent)))) + ); + + String xContentResult = Strings.toString(result, true, true); + assertThat(xContentResult, is(""" + { + "completion" : [ + { + "result" : "content" + } + ] + }""")); + } + + public void testToXContent_CreatesTheRightFormatForMultipleCompletionResults() { + String resultOneContent = "content 1"; + String resultTwoContent = "content 2"; + + var entity = new ChatCompletionResults( + List.of(new ChatCompletionResults.Result(resultOneContent), new ChatCompletionResults.Result(resultTwoContent)) + ); + + assertThat( + entity.asMap(), + is( + Map.of( + ChatCompletionResults.COMPLETION, + List.of( + Map.of(ChatCompletionResults.Result.RESULT, resultOneContent), + Map.of(ChatCompletionResults.Result.RESULT, resultTwoContent) + ) + ) + ) + ); + + String xContentResult = Strings.toString(entity, true, true); + assertThat(xContentResult, is(""" + { + "completion" : [ + { + "result" : "content 1" + }, + { + "result" : "content 2" + } + ] + }""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return ChatCompletionResults::new; + } + + @Override + protected 
ChatCompletionResults createTestInstance() { + return createRandomResults(); + } + + @Override + protected ChatCompletionResults mutateInstance(ChatCompletionResults instance) throws IOException { + // if true we reduce the chat results list by a random amount, if false we add a chat result to the list + if (randomBoolean()) { + // -1 to remove at least one item from the list + int end = randomInt(instance.results().size() - 1); + return new ChatCompletionResults(instance.results().subList(0, end)); + } else { + List completionResults = new ArrayList<>(instance.results()); + completionResults.add(createRandomChatCompletionResult()); + return new ChatCompletionResults(completionResults); + } + } + + public static ChatCompletionResults createRandomResults() { + int numOfCompletionResults = randomIntBetween(1, 10); + List chatCompletionResults = new ArrayList<>(numOfCompletionResults); + + for (int i = 0; i < numOfCompletionResults; i++) { + chatCompletionResults.add(createRandomChatCompletionResult()); + } + + return new ChatCompletionResults(chatCompletionResults); + } + + private static ChatCompletionResults.Result createRandomChatCompletionResult() { + return new ChatCompletionResults.Result(randomAlphaOfLengthBetween(10, 300)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index d819b2b243872..96a5b2d48e4e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.services.ServiceFields; 
+import org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests; import org.hamcrest.MatcherAssert; @@ -119,6 +120,41 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc } } + public void testParseRequestConfig_CreatesAnOpenAiChatCompletionsModel() throws IOException { + var url = "url"; + var organization = "org"; + var model = "model"; + var user = "user"; + var secret = "secret"; + + try (var service = createOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(m -> { + assertThat(m, instanceOf(OpenAiChatCompletionModel.class)); + + var completionsModel = (OpenAiChatCompletionModel) m; + + assertThat(completionsModel.getServiceSettings().uri().toString(), is(url)); + assertThat(completionsModel.getServiceSettings().organizationId(), is(organization)); + assertThat(completionsModel.getServiceSettings().modelId(), is(model)); + assertThat(completionsModel.getTaskSettings().user(), is(user)); + assertThat(completionsModel.getSecretSettings().apiKey().toString(), is(secret)); + + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.COMPLETION, + getRequestConfigMap( + getServiceSettingsMap(model, url, organization), + getTaskSettingsMap(user), + getSecretSettingsMap(secret) + ), + Set.of(), + modelVerificationListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap( @@ -244,6 +280,33 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO } } + public void 
testParseRequestConfig_CreatesAnOpenAiChatCompletionsModelWithoutUserWithoutUserUrlOrganization() throws IOException { + var model = "model"; + var secret = "secret"; + + try (var service = createOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(m -> { + assertThat(m, instanceOf(OpenAiChatCompletionModel.class)); + + var completionsModel = (OpenAiChatCompletionModel) m; + assertNull(completionsModel.getServiceSettings().uri()); + assertNull(completionsModel.getServiceSettings().organizationId()); + assertThat(completionsModel.getServiceSettings().modelId(), is(model)); + assertNull(completionsModel.getTaskSettings().user()); + assertThat(completionsModel.getSecretSettings().apiKey().toString(), is(secret)); + + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.COMPLETION, + getRequestConfigMap(getServiceSettingsMap(model, null, null), getTaskSettingsMap(null), getSecretSettingsMap(secret)), + Set.of(), + modelVerificationListener + ); + } + } + public void testParseRequestConfig_MovesModel() throws IOException { try (var service = createOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap(model -> { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java new file mode 100644 index 0000000000000..efc1fcc921ef3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class OpenAiChatCompletionModelTests extends ESTestCase { + + public void testOverrideWith_OverridesUser() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + var requestTaskSettingsMap = getChatCompletionRequestTaskSettingsMap("user_override"); + + var overriddenModel = OpenAiChatCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, is(createChatCompletionModel("url", "org", "api_key", "model_name", "user_override"))); + } + + public void testOverrideWith_EmptyMap() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + + var requestTaskSettingsMap = Map.of(); + + var overriddenModel = OpenAiChatCompletionModel.of(model, requestTaskSettingsMap); + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap() { + var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + + var overriddenModel = OpenAiChatCompletionModel.of(model, null); + assertThat(overriddenModel, sameInstance(model)); + } + + public static OpenAiChatCompletionModel createChatCompletionModel( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String 
user + ) { + return new OpenAiChatCompletionModel( + "id", + TaskType.COMPLETION, + "service", + new OpenAiChatCompletionServiceSettings(modelName, url, org, null), + new OpenAiChatCompletionTaskSettings(user), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java new file mode 100644 index 0000000000000..24632e120f94b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(settings.user()); + } + + public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "value"))); + assertNull(settings.user()); + } + + public void testFromMap_ReturnsUser() { + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user")) + ); + assertThat(settings.user(), is("user")); + } + + public static Map getChatCompletionRequestTaskSettingsMap(@Nullable String user) { + var map = new HashMap(); + + if (user != null) { + map.put(OpenAiChatCompletionTaskSettings.USER, user); + } + + return map; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java new file mode 100644 index 0000000000000..8778b2f13e746 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -0,0 +1,193 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; + +import java.io.IOException; +import java.net.URI; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var modelId = "some model"; + var url = "https://www.elastic.co"; + var org = "organization"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + ServiceFields.URL, + url, + OpenAiChatCompletionServiceSettings.ORGANIZATION, + org, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + + assertThat(serviceSettings, is(new OpenAiChatCompletionServiceSettings(modelId, ServiceUtils.createUri(url), org, maxInputTokens))); + } + + public void testFromMap_MissingUrl_DoesNotThrowException() { + var modelId = "some model"; + var organization = "org"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + OpenAiChatCompletionServiceSettings.ORGANIZATION, + organization, + 
ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + + assertNull(serviceSettings.uri()); + assertThat(serviceSettings.modelId(), is(modelId)); + assertThat(serviceSettings.organizationId(), is(organization)); + assertThat(serviceSettings.maxInputTokens(), is(maxInputTokens)); + } + + public void testFromMap_EmptyUrl_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, "", ServiceFields.MODEL_ID, "model"))) + ); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", + ServiceFields.URL + ) + ) + ); + } + + public void testFromMap_MissingOrganization_DoesNotThrowException() { + var modelId = "some model"; + var maxInputTokens = 8192; + + var serviceSettings = OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.MODEL_ID, modelId, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens)) + ); + + assertNull(serviceSettings.uri()); + assertThat(serviceSettings.modelId(), is(modelId)); + assertThat(serviceSettings.maxInputTokens(), is(maxInputTokens)); + } + + public void testFromMap_EmptyOrganization_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(OpenAiChatCompletionServiceSettings.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")) + ) + ); + + assertThat( + thrownException.getMessage(), + containsString( + org.elasticsearch.common.Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", + OpenAiChatCompletionServiceSettings.ORGANIZATION + ) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionServiceSettings.fromMap( + new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.MODEL_ID, "model")) + ) + ); + + assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) + ); + } + + public void testToXContent_WritesAllValues() throws IOException { + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", "url", "org", 1024); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = org.elasticsearch.common.Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model","url":"url","organization_id":"org","max_input_tokens":1024}""")); + } + + public void testToXContent_DoesNotWriteOptionalValues() throws IOException { + URI uri = null; + + var serviceSettings = new OpenAiChatCompletionServiceSettings("model", uri, null, null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + serviceSettings.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"model_id":"model"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenAiChatCompletionServiceSettings::new; + } + + @Override + protected OpenAiChatCompletionServiceSettings createTestInstance() { + return createRandomWithNonNullUrl(); + } + + @Override + protected OpenAiChatCompletionServiceSettings mutateInstance(OpenAiChatCompletionServiceSettings instance) throws IOException { + return createRandomWithNonNullUrl(); + } + + private static 
OpenAiChatCompletionServiceSettings createRandomWithNonNullUrl() { + return createRandom(randomAlphaOfLength(15)); + } + + private static OpenAiChatCompletionServiceSettings createRandom(String url) { + var modelId = randomAlphaOfLength(8); + var organizationId = randomFrom(randomAlphaOfLength(15), null); + var maxInputTokens = randomFrom(randomIntBetween(128, 4096), null); + + return new OpenAiChatCompletionServiceSettings(modelId, ServiceUtils.createUri(url), organizationId, maxInputTokens); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java new file mode 100644 index 0000000000000..66a9ec371eb93 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.openai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class OpenAiChatCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static OpenAiChatCompletionTaskSettings createRandomWithUser() { + return new OpenAiChatCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public void testFromMap_WithUser() { + assertEquals( + new OpenAiChatCompletionTaskSettings("user"), + OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, ""))) + ); + + assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. 
[user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))); + + var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of( + taskSettings, + OpenAiChatCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))); + + var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user2")) + ); + + var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new OpenAiChatCompletionTaskSettings("user2"))); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenAiChatCompletionTaskSettings::new; + } + + @Override + protected OpenAiChatCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected OpenAiChatCompletionTaskSettings mutateInstance(OpenAiChatCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} From 47dbd611b72235515f2e107d9832159143965ee6 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 26 Mar 2024 08:02:11 +0100 Subject: [PATCH 166/214] Refactor MultiBucketAggregatorsReducer and DelayedMultiBucketAggregatorsReducer (#106725) renamed to BucketReducer DelayedBucketReducer and they have a new property containing the prototype bucket --- 
.../adjacency/InternalAdjacencyMatrix.java | 39 ++++++++++++------- .../histogram/InternalAutoDateHistogram.java | 22 +++++------ ...egatorsReducer.java => BucketReducer.java} | 24 ++++++++---- ...Reducer.java => DelayedBucketReducer.java} | 33 ++++++++++------ .../FixedMultiBucketAggregatorsReducer.java | 27 ++++++------- .../bucket/composite/InternalComposite.java | 14 +++---- .../bucket/geogrid/InternalGeoGrid.java | 14 +++---- .../InternalVariableWidthHistogram.java | 16 ++++---- ...ongKeyedMultiBucketsAggregatorReducer.java | 16 ++++---- .../bucket/prefix/InternalIpPrefix.java | 26 ++++++------- .../terms/InternalSignificantTerms.java | 39 +++++++++++-------- 11 files changed, 151 insertions(+), 119 deletions(-) rename server/src/main/java/org/elasticsearch/search/aggregations/bucket/{MultiBucketAggregatorsReducer.java => BucketReducer.java} (69%) rename server/src/main/java/org/elasticsearch/search/aggregations/bucket/{DelayedMultiBucketAggregatorsReducer.java => DelayedBucketReducer.java} (67%) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 6e70e9263df47..6bb16415adfc2 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -11,20 +11,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorReducer; import 
org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -180,28 +180,40 @@ public InternalBucket getBucketByKey(String key) { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final Map bucketsReducer = new HashMap<>(getBuckets().size()); + final ObjectObjectPagedHashMap> bucketsReducer = new ObjectObjectPagedHashMap<>( + getBuckets().size(), + reduceContext.bigArrays() + ); @Override public void accept(InternalAggregation aggregation) { final InternalAdjacencyMatrix filters = (InternalAdjacencyMatrix) aggregation; for (InternalBucket bucket : filters.buckets) { - MultiBucketAggregatorsReducer reducer = bucketsReducer.computeIfAbsent( - bucket.key, - k -> new MultiBucketAggregatorsReducer(reduceContext, size) - ); + BucketReducer reducer = bucketsReducer.get(bucket.key); + if (reducer == null) { + reducer = new BucketReducer<>(bucket, reduceContext, size); + boolean success = false; + try { + bucketsReducer.put(bucket.key, reducer); + success = true; + } finally { + if (success == false) { + Releasables.close(reducer); + } + } + } reducer.accept(bucket); } } @Override public InternalAggregation get() { - List reducedBuckets = new ArrayList<>(bucketsReducer.size()); - for (Map.Entry entry : bucketsReducer.entrySet()) { - if (entry.getValue().getDocCount() >= 1) { - 
reducedBuckets.add(new InternalBucket(entry.getKey(), entry.getValue().getDocCount(), entry.getValue().get())); + List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); + bucketsReducer.forEach(entry -> { + if (entry.value.getDocCount() >= 1) { + reducedBuckets.add(new InternalBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations())); } - } + }); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); reducedBuckets.sort(Comparator.comparing(InternalBucket::getKey)); return new InternalAdjacencyMatrix(name, reducedBuckets, getMetadata()); @@ -209,7 +221,8 @@ public InternalAggregation get() { @Override public void close() { - Releasables.close(bucketsReducer.values()); + bucketsReducer.forEach(entry -> Releasables.close(entry.value)); + Releasables.close(bucketsReducer); } }; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index f0f7984079d97..ab531b69be947 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -21,7 +21,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -444,7 +444,7 @@ static int 
getAppropriateRounding(long minKey, long maxKey, int roundingIdx, Rou @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - private final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( + private final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( getBuckets().size(), reduceContext.bigArrays() ); @@ -460,9 +460,9 @@ public void accept(InternalAggregation aggregation) { min = Math.min(min, histogram.buckets.get(0).key); max = Math.max(max, histogram.buckets.get(histogram.buckets.size() - 1).key); for (Bucket bucket : histogram.buckets) { - MultiBucketAggregatorsReducer reducer = bucketsReducer.get(bucket.key); + BucketReducer reducer = bucketsReducer.get(bucket.key); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(reduceContext, size); + reducer = new BucketReducer<>(bucket, reduceContext, size); bucketsReducer.put(bucket.key, reducer); } reducer.accept(bucket); @@ -480,34 +480,34 @@ public InternalAggregation get() { { // fill the array and sort it final int[] index = new int[] { 0 }; - bucketsReducer.iterator().forEachRemaining(c -> keys[index[0]++] = c.key); + bucketsReducer.forEach(c -> keys[index[0]++] = c.key); Arrays.sort(keys); } final List reducedBuckets = new ArrayList<>(); if (keys.length > 0) { // list of buckets coming from different shards that have the same key - MultiBucketAggregatorsReducer currentReducer = null; + BucketReducer currentReducer = null; long key = reduceRounding.round(keys[0]); for (long top : keys) { if (reduceRounding.round(top) != key) { assert currentReducer != null; // the key changes, reduce what we already buffered and reset the buffer for current buckets - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.get())); + reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); currentReducer = null; 
key = reduceRounding.round(top); } - final MultiBucketAggregatorsReducer nextReducer = bucketsReducer.get(top); + final BucketReducer nextReducer = bucketsReducer.get(top); if (currentReducer == null) { currentReducer = nextReducer; } else { - currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.get())); + currentReducer.accept(createBucket(key, nextReducer.getDocCount(), nextReducer.getAggregations())); } } if (currentReducer != null) { - reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.get())); + reducedBuckets.add(createBucket(key, currentReducer.getDocCount(), currentReducer.getAggregations())); } } @@ -546,7 +546,7 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(c -> Releasables.close(c.value)); + bucketsReducer.forEach(c -> Releasables.close(c.value)); Releasables.close(bucketsReducer); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java similarity index 69% rename from server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java index e7d0e6a17e4c6..a9aa3efd536d4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketReducer.java @@ -15,32 +15,40 @@ import org.elasticsearch.search.aggregations.InternalAggregations; /** - * Class for reducing a list of {@link MultiBucketsAggregation.Bucket} to a single - * {@link InternalAggregations} and the number of documents. + * Class for reducing a list of {@link B} to a single {@link InternalAggregations} + * and the number of documents. 
*/ -public final class MultiBucketAggregatorsReducer implements Releasable { +public final class BucketReducer implements Releasable { private final AggregatorsReducer aggregatorsReducer; + private final B proto; private long count = 0; - public MultiBucketAggregatorsReducer(AggregationReduceContext context, int size) { + public BucketReducer(B proto, AggregationReduceContext context, int size) { this.aggregatorsReducer = new AggregatorsReducer(context, size); + this.proto = proto; } /** - * Adds a {@link MultiBucketsAggregation.Bucket} for reduction. + * Adds a {@link B} for reduction. */ - public void accept(MultiBucketsAggregation.Bucket bucket) { + public void accept(B bucket) { count += bucket.getDocCount(); aggregatorsReducer.accept(bucket.getAggregations()); } + /** + * returns the bucket prototype. + */ + public B getProto() { + return proto; + } + /** * returns the reduced {@link InternalAggregations}. */ - public InternalAggregations get() { + public InternalAggregations getAggregations() { return aggregatorsReducer.get(); - } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java similarity index 67% rename from server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java index 7fc7c96badaaa..b29159c66ac40 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedMultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DelayedBucketReducer.java @@ -16,38 +16,49 @@ import java.util.List; /** - * Class for reducing a list of {@link MultiBucketsAggregation.Bucket} to a single - * {@link InternalAggregations} and the number of documents in a delayable fashion. 
+ * Class for reducing a list of {@link B} to a single {@link InternalAggregations} + * and the number of documents in a delayable fashion. * - * This class can be reused by calling {@link #reset()}. + * This class can be reused by calling {@link #reset(B)}. * - * @see MultiBucketAggregatorsReducer + * @see BucketReducer */ -public final class DelayedMultiBucketAggregatorsReducer { +public final class DelayedBucketReducer { private final AggregationReduceContext context; + // changes at reset time + private B proto; // the maximum size of this array is the number of shards to be reduced. We currently do it in a batches of 256 - // if we expect bigger batches, we might consider to use ObjectArray. + // by default. if we expect bigger batches, we might consider to use ObjectArray. private final List internalAggregations; private long count = 0; - public DelayedMultiBucketAggregatorsReducer(AggregationReduceContext context) { + public DelayedBucketReducer(B proto, AggregationReduceContext context) { + this.proto = proto; this.context = context; this.internalAggregations = new ArrayList<>(); } /** - * Adds a {@link MultiBucketsAggregation.Bucket} for reduction. + * Adds a {@link B} for reduction. */ - public void accept(MultiBucketsAggregation.Bucket bucket) { + public void accept(B bucket) { count += bucket.getDocCount(); internalAggregations.add(bucket.getAggregations()); } + /** + * returns the bucket prototype. + */ + public B getProto() { + return proto; + } + /** * Reset the content of this reducer. */ - public void reset() { + public void reset(B proto) { + this.proto = proto; count = 0L; internalAggregations.clear(); } @@ -55,7 +66,7 @@ public void reset() { /** * returns the reduced {@link InternalAggregations}. 
*/ - public InternalAggregations get() { + public InternalAggregations getAggregations() { try (AggregatorsReducer aggregatorsReducer = new AggregatorsReducer(context, internalAggregations.size())) { for (InternalAggregations agg : internalAggregations) { aggregatorsReducer.accept(agg); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java index 899d9dad7229c..a7261c9fd73f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/FixedMultiBucketAggregatorsReducer.java @@ -23,15 +23,13 @@ public abstract class FixedMultiBucketAggregatorsReducer implements Releasable { // we could use an ObjectArray here but these arrays are in normally small, so it is not worthy - private final MultiBucketAggregatorsReducer[] bucketsReducer; - private final List protoList; + private final List> bucketReducer; public FixedMultiBucketAggregatorsReducer(AggregationReduceContext reduceContext, int size, List protoList) { reduceContext.consumeBucketsAndMaybeBreak(protoList.size()); - this.protoList = protoList; - this.bucketsReducer = new MultiBucketAggregatorsReducer[protoList.size()]; + this.bucketReducer = new ArrayList<>(protoList.size()); for (int i = 0; i < protoList.size(); ++i) { - bucketsReducer[i] = new MultiBucketAggregatorsReducer(reduceContext, size); + bucketReducer.add(new BucketReducer<>(protoList.get(i), reduceContext, size)); } } @@ -40,10 +38,9 @@ public FixedMultiBucketAggregatorsReducer(AggregationReduceContext reduceContext * of the list passed on the constructor */ public final void accept(List buckets) { - assert buckets.size() == protoList.size(); - int i = 0; - for (B bucket : buckets) { - bucketsReducer[i++].accept(bucket); + assert buckets.size() == 
bucketReducer.size(); + for (int i = 0; i < buckets.size(); i++) { + bucketReducer.get(i).accept(buckets.get(i)); } } @@ -51,19 +48,17 @@ public final void accept(List buckets) { * returns the reduced buckets. */ public final List get() { - final List reduceBuckets = new ArrayList<>(protoList.size()); - for (int i = 0; i < protoList.size(); i++) { - final B proto = protoList.get(i); - final MultiBucketAggregatorsReducer reducer = bucketsReducer[i]; - reduceBuckets.add(createBucket(proto, reducer.getDocCount(), reducer.get())); + final List reduceBuckets = new ArrayList<>(bucketReducer.size()); + for (final BucketReducer reducer : bucketReducer) { + reduceBuckets.add(createBucket(reducer.getProto(), reducer.getDocCount(), reducer.getAggregations())); } return reduceBuckets; } - protected abstract B createBucket(B proto, long focCount, InternalAggregations aggregations); + protected abstract B createBucket(B proto, long docCount, InternalAggregations aggregations); @Override public final void close() { - Releasables.close(bucketsReducer); + Releasables.close(bucketReducer); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index fc13dcb6a22ee..22c967bb2ea14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -22,7 +22,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.DelayedMultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.DelayedBucketReducer; import 
org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -257,7 +257,7 @@ public void close() { } private class BucketsQueue implements Releasable { - private final ObjectObjectPagedHashMap bucketReducers; + private final ObjectObjectPagedHashMap> bucketReducers; private final ObjectArrayPriorityQueue queue; private final AggregationReduceContext reduceContext; @@ -274,12 +274,12 @@ protected boolean lessThan(InternalBucket a, InternalBucket b) { /** adds a bucket to the queue. Return false if the bucket is not competitive, otherwise true.*/ boolean add(InternalBucket bucket) { - DelayedMultiBucketAggregatorsReducer delayed = bucketReducers.get(bucket.key); + DelayedBucketReducer delayed = bucketReducers.get(bucket.key); if (delayed == null) { final InternalBucket out = queue.insertWithOverflow(bucket); if (out == null) { // bucket is added - delayed = new DelayedMultiBucketAggregatorsReducer(reduceContext); + delayed = new DelayedBucketReducer<>(bucket, reduceContext); } else if (out == bucket) { // bucket is not competitive return false; @@ -287,7 +287,7 @@ boolean add(InternalBucket bucket) { // bucket replaces existing bucket delayed = bucketReducers.remove(out.key); assert delayed != null; - delayed.reset(); + delayed.reset(bucket); } bucketReducers.put(bucket.key, delayed); } @@ -307,7 +307,7 @@ List get() { * just whatever formats make sense for *its* index. This can be real * trouble when the index doing the reducing is unmapped. 
*/ final var reducedFormats = bucket.formats; - final DelayedMultiBucketAggregatorsReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); + final DelayedBucketReducer reducer = Objects.requireNonNull(bucketReducers.get(bucket.key)); result[i] = new InternalBucket( sourceNames, reducedFormats, @@ -315,7 +315,7 @@ List get() { reverseMuls, missingOrders, reducer.getDocCount(), - reducer.get() + reducer.getAggregations() ); } return List.of(result); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index 4918a57b29ed1..027551288be5f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -17,7 +17,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -81,7 +81,7 @@ public List getBuckets() { protected AggregatorReducer getLeaderReducer(AggregationReduceContext context, int size) { return new AggregatorReducer() { - final LongObjectPagedHashMap bucketsReducer = new LongObjectPagedHashMap<>( + final LongObjectPagedHashMap> bucketsReducer = new LongObjectPagedHashMap<>( size, context.bigArrays() ); @@ -91,9 +91,9 @@ public void accept(InternalAggregation aggregation) { @SuppressWarnings("unchecked") final InternalGeoGrid grid = (InternalGeoGrid) aggregation; for (InternalGeoGridBucket bucket : grid.getBuckets()) { - 
MultiBucketAggregatorsReducer reducer = bucketsReducer.get(bucket.hashAsLong()); + BucketReducer reducer = bucketsReducer.get(bucket.hashAsLong()); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(context, size); + reducer = new BucketReducer<>(bucket, context, size); bucketsReducer.put(bucket.hashAsLong(), reducer); } reducer.accept(bucket); @@ -106,8 +106,8 @@ public InternalAggregation get() { context.isFinalReduce() == false ? bucketsReducer.size() : Math.min(requiredSize, bucketsReducer.size()) ); try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, context.bigArrays())) { - bucketsReducer.iterator().forEachRemaining(entry -> { - InternalGeoGridBucket bucket = createBucket(entry.key, entry.value.getDocCount(), entry.value.get()); + bucketsReducer.forEach(entry -> { + InternalGeoGridBucket bucket = createBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations()); ordered.insertWithOverflow(bucket); }); final InternalGeoGridBucket[] list = new InternalGeoGridBucket[(int) ordered.size()]; @@ -121,7 +121,7 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(r -> Releasables.close(r.value)); + bucketsReducer.forEach(r -> Releasables.close(r.value)); Releasables.close(bucketsReducer); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 3478773464feb..27a79095eb49d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -492,7 +492,7 @@ public void accept(InternalAggregation aggregation) { long key = NumericUtils.doubleToSortableLong(bucket.centroid()); ReducerAndExtraInfo reducer = bucketsReducer.get(key); if (reducer == null) { - reducer = new ReducerAndExtraInfo(new MultiBucketAggregatorsReducer(reduceContext, size)); + reducer = new ReducerAndExtraInfo(new BucketReducer<>(bucket, reduceContext, size)); bucketsReducer.put(key, reducer); reduceContext.consumeBucketsAndMaybeBreak(1); } @@ -506,10 +506,12 @@ public void accept(InternalAggregation aggregation) { @Override public InternalAggregation get() { final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.iterator().forEachRemaining(entry -> { + bucketsReducer.forEach(entry -> { final double centroid = entry.value.sum[0] / entry.value.reducer.getDocCount(); final Bucket.BucketBounds bounds = new Bucket.BucketBounds(entry.value.min[0], entry.value.max[0]); - reducedBuckets.add(new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.get())); + reducedBuckets.add( + new Bucket(centroid, bounds, entry.value.reducer.getDocCount(), format, entry.value.reducer.getAggregations()) + ); }); reducedBuckets.sort(Comparator.comparing(Bucket::centroid)); mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); @@ -523,14 +525,14 @@ public InternalAggregation get() { @Override public void close() { - bucketsReducer.iterator().forEachRemaining(entry -> 
Releasables.close(entry.value.reducer)); + bucketsReducer.forEach(entry -> Releasables.close(entry.value.reducer)); Releasables.close(bucketsReducer); } }; } - private record ReducerAndExtraInfo(MultiBucketAggregatorsReducer reducer, double[] min, double[] max, double[] sum) { - private ReducerAndExtraInfo(MultiBucketAggregatorsReducer reducer) { + private record ReducerAndExtraInfo(BucketReducer reducer, double[] min, double[] max, double[] sum) { + private ReducerAndExtraInfo(BucketReducer reducer) { this(reducer, new double[] { Double.POSITIVE_INFINITY }, new double[] { Double.NEGATIVE_INFINITY }, new double[] { 0 }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java index 71374421481eb..07208ab2096a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/LongKeyedMultiBucketsAggregatorReducer.java @@ -13,7 +13,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import java.util.ArrayList; @@ -28,7 +28,7 @@ abstract class LongKeyedMultiBucketsAggregatorReducer bucketsReducer; + private final LongObjectPagedHashMap> bucketsReducer; int consumeBucketCount = 0; LongKeyedMultiBucketsAggregatorReducer(AggregationReduceContext reduceContext, int size, long minDocCount) { @@ -42,16 +42,16 @@ abstract class 
LongKeyedMultiBucketsAggregatorReducer reducer = bucketsReducer.get(key); if (reducer == null) { - reducer = new MultiBucketAggregatorsReducer(reduceContext, size); + reducer = new BucketReducer<>(bucket, reduceContext, size); bucketsReducer.put(key, reducer); } consumeBucketsAndMaybeBreak(reducer, bucket); reducer.accept(bucket); } - private void consumeBucketsAndMaybeBreak(MultiBucketAggregatorsReducer reducer, B bucket) { + private void consumeBucketsAndMaybeBreak(BucketReducer reducer, B bucket) { if (reduceContext.isFinalReduce() == false || minDocCount == 0) { if (reducer.getDocCount() == 0 && bucket.getDocCount() > 0) { consumeBucketsAndMaybeBreak(); @@ -76,9 +76,9 @@ private void consumeBucketsAndMaybeBreak() { public final List get() { reduceContext.consumeBucketsAndMaybeBreak(consumeBucketCount); final List reducedBuckets = new ArrayList<>((int) bucketsReducer.size()); - bucketsReducer.iterator().forEachRemaining(entry -> { + bucketsReducer.forEach(entry -> { if (reduceContext.isFinalReduce() == false || entry.value.getDocCount() >= minDocCount) { - reducedBuckets.add(createBucket(entry.key, entry.value.getDocCount(), entry.value.get())); + reducedBuckets.add(createBucket(entry.key, entry.value.getDocCount(), entry.value.getAggregations())); } }); return reducedBuckets; @@ -91,7 +91,7 @@ public final List get() { @Override public final void close() { - bucketsReducer.iterator().forEachRemaining(r -> Releasables.close(r.value)); + bucketsReducer.forEach(r -> Releasables.close(r.value)); Releasables.close(bucketsReducer); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index 50b92a2e77841..3557947bb9ea7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.KeyComparable; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -225,7 +225,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) { return new AggregatorReducer() { - final ObjectObjectPagedHashMap buckets = new ObjectObjectPagedHashMap<>( + final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( getBuckets().size(), reduceContext.bigArrays() ); @@ -234,29 +234,29 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont public void accept(InternalAggregation aggregation) { final InternalIpPrefix ipPrefix = (InternalIpPrefix) aggregation; for (Bucket bucket : ipPrefix.getBuckets()) { - ReducerAndProto reducerAndProto = buckets.get(bucket.key); - if (reducerAndProto == null) { - reducerAndProto = new ReducerAndProto(new MultiBucketAggregatorsReducer(reduceContext, size), bucket); + BucketReducer bucketReducer = buckets.get(bucket.key); + if (bucketReducer == null) { + bucketReducer = new BucketReducer<>(bucket, reduceContext, size); boolean success = false; try { - buckets.put(bucket.key, reducerAndProto); + buckets.put(bucket.key, bucketReducer); success = true; } finally { if (success == false) { - Releasables.close(reducerAndProto.reducer); + Releasables.close(bucketReducer); } } } - reducerAndProto.reducer.accept(bucket); + bucketReducer.accept(bucket); } } @Override public InternalAggregation get() { final List 
reducedBuckets = new ArrayList<>(Math.toIntExact(buckets.size())); - buckets.iterator().forEachRemaining(entry -> { - if (false == reduceContext.isFinalReduce() || entry.value.reducer.getDocCount() >= minDocCount) { - reducedBuckets.add(createBucket(entry.value.proto, entry.value.reducer.get(), entry.value.reducer.getDocCount())); + buckets.forEach(entry -> { + if (false == reduceContext.isFinalReduce() || entry.value.getDocCount() >= minDocCount) { + reducedBuckets.add(createBucket(entry.value.getProto(), entry.value.getAggregations(), entry.value.getDocCount())); } }); reduceContext.consumeBucketsAndMaybeBreak(reducedBuckets.size()); @@ -266,14 +266,12 @@ public InternalAggregation get() { @Override public void close() { - buckets.iterator().forEachRemaining(entry -> Releasables.close(entry.value.reducer)); + buckets.forEach(entry -> Releasables.close(entry.value)); Releasables.close(buckets); } }; } - private record ReducerAndProto(MultiBucketAggregatorsReducer reducer, Bucket proto) {} - @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { if (keyed) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index 440e42f845ce2..f8e7f3cf3a69c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -18,7 +18,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregatorsReducer; +import org.elasticsearch.search.aggregations.bucket.BucketReducer; +import 
org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; @@ -202,7 +203,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont return new AggregatorReducer() { long globalSubsetSize = 0; long globalSupersetSize = 0; - final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( + final ObjectObjectPagedHashMap> buckets = new ObjectObjectPagedHashMap<>( getBuckets().size(), reduceContext.bigArrays() ); @@ -216,22 +217,22 @@ public void accept(InternalAggregation aggregation) { globalSubsetSize += terms.getSubsetSize(); globalSupersetSize += terms.getSupersetSize(); for (B bucket : terms.getBuckets()) { - ReducerAndProto reducerAndProto = buckets.get(bucket.getKeyAsString()); - if (reducerAndProto == null) { - reducerAndProto = new ReducerAndProto<>(new MultiBucketAggregatorsReducer(reduceContext, size), bucket); + ReducerAndExtraInfo reducerAndExtraInfo = buckets.get(bucket.getKeyAsString()); + if (reducerAndExtraInfo == null) { + reducerAndExtraInfo = new ReducerAndExtraInfo<>(new BucketReducer<>(bucket, reduceContext, size)); boolean success = false; try { - buckets.put(bucket.getKeyAsString(), reducerAndProto); + buckets.put(bucket.getKeyAsString(), reducerAndExtraInfo); success = true; } finally { if (success == false) { - Releasables.close(reducerAndProto.reducer); + Releasables.close(reducerAndExtraInfo.reducer); } } } - reducerAndProto.reducer.accept(bucket); - reducerAndProto.subsetDf[0] += bucket.subsetDf; - reducerAndProto.supersetDf[0] += bucket.supersetDf; + reducerAndExtraInfo.reducer.accept(bucket); + reducerAndExtraInfo.subsetDf[0] += bucket.subsetDf; + reducerAndExtraInfo.supersetDf[0] += bucket.supersetDf; } } @@ -240,14 +241,14 @@ public InternalAggregation get() { final 
SignificanceHeuristic heuristic = getSignificanceHeuristic().rewrite(reduceContext); final int size = (int) (reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size())); try (BucketSignificancePriorityQueue ordered = new BucketSignificancePriorityQueue<>(size, reduceContext.bigArrays())) { - buckets.iterator().forEachRemaining(entry -> { + buckets.forEach(entry -> { final B b = createBucket( entry.value.subsetDf[0], globalSubsetSize, entry.value.supersetDf[0], globalSupersetSize, - entry.value.reducer.get(), - entry.value.proto + entry.value.reducer.getAggregations(), + entry.value.reducer.getProto() ); b.updateScore(heuristic); if (((b.score > 0) && (b.subsetDf >= minDocCount)) || reduceContext.isFinalReduce() == false) { @@ -271,15 +272,19 @@ public InternalAggregation get() { @Override public void close() { - buckets.iterator().forEachRemaining(entry -> Releasables.close(entry.value.reducer)); + buckets.forEach(entry -> Releasables.close(entry.value.reducer)); Releasables.close(buckets); } }; } - private record ReducerAndProto(MultiBucketAggregatorsReducer reducer, B proto, long[] subsetDf, long[] supersetDf) { - private ReducerAndProto(MultiBucketAggregatorsReducer reducer, B proto) { - this(reducer, proto, new long[] { 0 }, new long[] { 0 }); + private record ReducerAndExtraInfo( + BucketReducer reducer, + long[] subsetDf, + long[] supersetDf + ) { + private ReducerAndExtraInfo(BucketReducer reducer) { + this(reducer, new long[] { 0 }, new long[] { 0 }); } } From a3d96b93337d4af17ca85157f587d3cf37029d9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 26 Mar 2024 08:20:34 +0100 Subject: [PATCH 167/214] [DOCS] Changes model_id path param to inference_id (#106719) --- .../inference/delete-inference.asciidoc | 8 ++++---- .../inference/get-inference.asciidoc | 10 +++++----- .../inference/post-inference.asciidoc | 12 +++++------ .../inference/put-inference.asciidoc | 20 
+++++++++---------- .../semantic-search-inference.asciidoc | 4 ++-- .../infer-api-ingest-pipeline.asciidoc | 8 ++++---- .../inference-api/infer-api-search.asciidoc | 4 ++-- .../inference-api/infer-api-task.asciidoc | 6 ++++-- 8 files changed, 37 insertions(+), 35 deletions(-) diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 4b661236aa928..5b693f51d65da 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -16,9 +16,9 @@ own model, use the <>. [[delete-inference-api-request]] ==== {api-request-title} -`DELETE /_inference/` +`DELETE /_inference/` -`DELETE /_inference//` +`DELETE /_inference//` [discrete] [[delete-inference-api-prereqs]] @@ -32,9 +32,9 @@ own model, use the <>. [[delete-inference-api-path-params]] ==== {api-path-parms-title} -:: +:: (Required, string) -The unique identifier of the {infer} model to delete. +The unique identifier of the {infer} endpoint to delete. :: (Optional, string) diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 705bc4e7a8c61..1a11904a169ca 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -18,11 +18,11 @@ own model, use the <>. `GET /_inference/_all` -`GET /_inference/` +`GET /_inference/` `GET /_inference//_all` -`GET /_inference//` +`GET /_inference//` [discrete] [[get-inference-api-prereqs]] @@ -47,9 +47,9 @@ and a wildcard expression, [[get-inference-api-path-params]] ==== {api-path-parms-title} -``:: +``:: (Optional, string) -The unique identifier of the {infer} model. +The unique identifier of the {infer} endpoint. 
``:: @@ -77,7 +77,7 @@ The API returns the following response: [source,console-result] ------------------------------------------------------------ { - "model_id": "my-elser-model", + "inference_id": "my-elser-model", "task_type": "sparse_embedding", "service": "elser", "service_settings": { diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 970cec7f4a452..e4cbd26904271 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -16,9 +16,9 @@ own model, use the <>. [[post-inference-api-request]] ==== {api-request-title} -`POST /_inference/` +`POST /_inference/` -`POST /_inference//` +`POST /_inference//` [discrete] @@ -32,8 +32,8 @@ own model, use the <>. [[post-inference-api-desc]] ==== {api-description-title} -The perform {infer} API enables you to use {infer} models to perform specific -tasks on data that you provide as an input. The API returns a response with the +The perform {infer} API enables you to use {ml} models to perform specific tasks +on data that you provide as an input. The API returns a response with the resutls of the tasks. The {infer} model you use can perform one specific task that has been defined when the model was created with the <>. @@ -42,9 +42,9 @@ that has been defined when the model was created with the <>. [[post-inference-api-path-params]] ==== {api-path-parms-title} -``:: +``:: (Required, string) -The unique identifier of the {infer} model. +The unique identifier of the {infer} endpoint. ``:: diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 9d5e187f5994a..c0b9d508e13c3 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -33,7 +33,7 @@ or if you want to use non-NLP models, use the <>. 
[[put-inference-api-desc]] ==== {api-description-title} -The create {infer} API enables you to create and configure an {infer} model to +The create {infer} API enables you to create and configure a {ml} model to perform a specific {infer} task. The following services are available through the {infer} API: @@ -50,9 +50,9 @@ The following services are available through the {infer} API: ==== {api-path-parms-title} -``:: +``:: (Required, string) -The unique identifier of the model. +The unique identifier of the {infer} endpoint. ``:: (Required, string) @@ -246,7 +246,7 @@ This section contains example API calls for every service type. [[inference-example-cohere]] ===== Cohere service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `cohere_embeddings` to perform a `text_embedding` task type. [source,console] @@ -268,7 +268,7 @@ PUT _inference/text_embedding/cohere-embeddings [[inference-example-e5]] ===== E5 via the elasticsearch service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-e5-model` to perform a `text_embedding` task type. [source,console] @@ -293,7 +293,7 @@ further details, refer to the {ml-docs}/ml-nlp-e5.html[E5 model documentation]. [[inference-example-elser]] ===== ELSER service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-elser-model` to perform a `sparse_embedding` task type. 
[source,console] @@ -315,7 +315,7 @@ Example response: [source,console-result] ------------------------------------------------------------ { - "model_id": "my-elser-model", + "inference_id": "my-elser-model", "task_type": "sparse_embedding", "service": "elser", "service_settings": { @@ -332,7 +332,7 @@ Example response: [[inference-example-hugging-face]] ===== Hugging Face service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `hugging-face_embeddings` to perform a `text_embedding` task type. [source,console] @@ -362,7 +362,7 @@ after the endpoint initialization has been finished. [[inference-example-eland]] ===== Models uploaded by Eland via the elasticsearch service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `my-msmarco-minilm-model` to perform a `text_embedding` task type. [source,console] @@ -387,7 +387,7 @@ been [[inference-example-openai]] ===== OpenAI service -The following example shows how to create an {infer} model called +The following example shows how to create an {infer} endpoint called `openai_embeddings` to perform a `text_embedding` task type. 
[source,console] diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 97a37e34eb116..b5619f8dda7b9 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -23,9 +23,9 @@ include::{es-repo-dir}/tab-widgets/inference-api/infer-api-requirements-widget.a [discrete] [[infer-text-embedding-task]] -==== Create the inference task +==== Create an inference endpoint -Create the {infer} task by using the <>: +Create an {infer} endpoint by using the <>: include::{es-repo-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc index a5a1910e8f8ef..39f37f407926e 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-ingest-pipeline.asciidoc @@ -28,8 +28,8 @@ PUT _ingest/pipeline/cohere_embeddings ] } -------------------------------------------------- -<1> The name of the inference configuration you created by using the -<>. +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. <2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. @@ -55,8 +55,8 @@ PUT _ingest/pipeline/openai_embeddings ] } -------------------------------------------------- -<1> The name of the inference configuration you created by using the -<>. +<1> The name of the inference endpoint you created by using the +<>, it's referred to as `inference_id` in that step. 
<2> Configuration object that defines the `input_field` for the {infer} process and the `output_field` that will contain the {infer} results. diff --git a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc index 1e8470471491f..843c351648c63 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-search.asciidoc @@ -8,7 +8,7 @@ GET cohere-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "model_id": "cohere_embeddings", + "inference_id": "cohere_embeddings", "model_text": "Muscles in human body" } }, @@ -83,7 +83,7 @@ GET openai-embeddings/_search "field": "content_embedding", "query_vector_builder": { "text_embedding": { - "model_id": "openai_embeddings", + "inference_id": "openai_embeddings", "model_text": "Calculate fuel cost" } }, diff --git a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc index 7c6e750138c1e..dea7511f74566 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-task.asciidoc @@ -13,7 +13,8 @@ PUT _inference/text_embedding/cohere_embeddings <1> } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The task type is `text_embedding` in the path. +<1> The task type is `text_embedding` in the path and the `inference_id` which +is the unique identifier of the {infer} endpoint is `cohere_embeddings`. <2> The API key of your Cohere account. You can find your API keys in your Cohere dashboard under the https://dashboard.cohere.com/api-keys[API keys section]. 
You need to provide @@ -46,7 +47,8 @@ PUT _inference/text_embedding/openai_embeddings <1> } ------------------------------------------------------------ // TEST[skip:TBD] -<1> The task type is `text_embedding` in the path. +<1> The task type is `text_embedding` in the path and the `inference_id` which +is the unique identifier of the {infer} endpoint is `openai_embeddings`. <2> The API key of your OpenAI account. You can find your OpenAI API keys in your OpenAI account under the https://platform.openai.com/api-keys[API keys section]. You need to provide From b39b3731a7923f38eddbe815e4f2c777261cd7d1 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 26 Mar 2024 08:39:39 +0100 Subject: [PATCH 168/214] Port krb5kdc to test container and rework hdfs handling (#106228) This ports our krb5kdc test fixture to test container and reworks hdfs handling to also be based on test containers. The yaml rest tests that are using hdfs required introducing variable substitution in yamlresttestparser handling. 
--- .../shadow/XmlClassRelocationTransformer.java | 142 ++++++ .../test/rest/InternalJavaRestTestPlugin.java | 10 +- .../test/rest/InternalYamlRestTestPlugin.java | 9 +- plugins/repository-hdfs/build.gradle | 479 ++++++------------ .../AbstractHaHdfsFailoverTestSuiteIT.java | 75 +++ .../hdfs/HaHdfsFailoverTestSuiteIT.java | 280 +--------- .../hdfs/SecureHaHdfsFailoverTestSuiteIT.java | 55 ++ .../hdfs/HdfsBlobStoreContainerTests.java | 1 + .../hdfs/HdfsBlobStoreRepositoryTests.java | 1 + .../hdfs/HdfsRepositoryTests.java | 1 + .../repositories/hdfs/HdfsTests.java | 1 + .../RepositoryHdfsClientYamlTestSuiteIT.java | 32 +- ...reRepositoryHdfsClientYamlTestSuiteIT.java | 62 +++ settings.gradle | 3 +- test/fixtures/hdfs-fixture/build.gradle | 90 ++++ .../hdfs/HdfsClientThreadLeakFilter.java | 9 +- .../test/fixtures/hdfs/HdfsFixture.java | 438 ++++++++++++++++ .../main/resources/readonly-repository.tar.gz | Bin test/fixtures/hdfs2-fixture/build.gradle | 13 - .../src/main/java/hdfs/MiniHDFS.java | 175 ------- test/fixtures/hdfs3-fixture/build.gradle | 13 - .../src/main/java/hdfs/MiniHDFS.java | 176 ------- .../main/resources/readonly-repository.tar.gz | Bin 1314 -> 0 bytes test/fixtures/krb5kdc-fixture/build.gradle | 64 ++- .../krb5kdc-fixture/docker-compose.yml | 32 -- .../fixtures/krb5kdc/Krb5kDcContainer.java | 172 +++++++ .../src/main/resources/provision/hdfs.sh | 2 +- .../DockerEnvironmentAwareTestContainer.java | 8 +- .../cluster/util/resource/FileResource.java | 7 +- .../test/cluster/util/resource/Resource.java | 4 + .../rest/yaml/ESClientYamlSuiteTestCase.java | 28 +- .../ParameterizableYamlXContentParser.java | 295 +++++++++++ .../yaml/section/ClientYamlTestSuite.java | 25 +- .../searchable-snapshots/qa/hdfs/build.gradle | 172 +------ .../hdfs/HdfsSearchableSnapshotsIT.java | 46 +- .../hdfs/SecureHdfsSearchableSnapshotsIT.java | 67 +++ .../qa/hdfs/build.gradle | 168 +----- .../AbstractHdfsSnapshotRepoTestKitIT.java | 38 ++ 
.../testkit/HdfsSnapshotRepoTestKitIT.java | 50 +- .../SecureHdfsSnapshotRepoTestKitIT.java | 63 +++ x-pack/qa/kerberos-tests/build.gradle | 44 +- .../kerberos/KerberosAuthenticationIT.java | 41 +- 42 files changed, 1912 insertions(+), 1479 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java create mode 100644 plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java create mode 100644 plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java create mode 100644 plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java create mode 100644 test/fixtures/hdfs-fixture/build.gradle rename {plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories => test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures}/hdfs/HdfsClientThreadLeakFilter.java (77%) create mode 100644 test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java rename test/fixtures/{hdfs2-fixture => hdfs-fixture}/src/main/resources/readonly-repository.tar.gz (100%) delete mode 100644 test/fixtures/hdfs2-fixture/build.gradle delete mode 100644 test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java delete mode 100644 test/fixtures/hdfs3-fixture/build.gradle delete mode 100644 test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java delete mode 100644 test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz delete mode 100644 test/fixtures/krb5kdc-fixture/docker-compose.yml create mode 100644 test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java create mode 100644 test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java create mode 100644 
x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java new file mode 100644 index 0000000000000..b365142282785 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/shadow/XmlClassRelocationTransformer.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.shadow; + +import com.github.jengelman.gradle.plugins.shadow.ShadowStats; +import com.github.jengelman.gradle.plugins.shadow.relocation.RelocateClassContext; +import com.github.jengelman.gradle.plugins.shadow.relocation.Relocator; +import com.github.jengelman.gradle.plugins.shadow.transformers.Transformer; +import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext; + +import org.apache.commons.io.IOUtils; +import org.apache.tools.zip.ZipEntry; +import org.apache.tools.zip.ZipOutputStream; +import org.gradle.api.file.FileTreeElement; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +import java.io.BufferedInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.List; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; + +public class XmlClassRelocationTransformer implements Transformer { + + boolean hasTransformedResource = false; + + private Document doc; + + private String resource; + + @Override + public boolean canTransformResource(FileTreeElement element) { + String path = element.getRelativePath().getPathString(); + if (resource != null && resource.equals(path)) { + return true; + } + return false; + } + + @Override + public void transform(TransformerContext context) { + try { + BufferedInputStream bis = new BufferedInputStream(context.getIs()); + DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + doc = dBuilder.parse(bis); + doc.getDocumentElement().normalize(); + Node root = doc.getDocumentElement(); + walkThroughNodes(root, context); + if (hasTransformedResource == false) { + this.doc = null; 
+ } + } catch (Exception e) { + throw new RuntimeException("Error parsing xml file in " + context.getIs(), e); + } + } + + private static String getRelocatedClass(String className, TransformerContext context) { + List relocators = context.getRelocators(); + ShadowStats stats = context.getStats(); + if (className != null && className.length() > 0 && relocators != null) { + for (Relocator relocator : relocators) { + if (relocator.canRelocateClass(className)) { + RelocateClassContext relocateClassContext = new RelocateClassContext(className, stats); + return relocator.relocateClass(relocateClassContext); + } + } + } + + return className; + } + + private void walkThroughNodes(Node node, TransformerContext context) { + if (node.getNodeType() == Node.TEXT_NODE) { + String nodeValue = node.getNodeValue(); + if (nodeValue.isBlank() == false) { + String relocatedClass = getRelocatedClass(nodeValue, context); + if (relocatedClass.equals(nodeValue) == false) { + node.setNodeValue(relocatedClass); + hasTransformedResource = true; + } + } + } + NodeList nodeList = node.getChildNodes(); + for (int i = 0; i < nodeList.getLength(); i++) { + Node currentNode = nodeList.item(i); + walkThroughNodes(currentNode, context); + } + } + + @Override + public boolean hasTransformedResource() { + return hasTransformedResource; + } + + @Override + public void modifyOutputStream(ZipOutputStream os, boolean preserveFileTimestamps) { + ZipEntry entry = new ZipEntry(resource); + entry.setTime(TransformerContext.getEntryTimestamp(preserveFileTimestamps, entry.getTime())); + + try { + // Write the content back to the XML file + TransformerFactory transformerFactory = TransformerFactory.newInstance(); + DOMSource source = new DOMSource(doc); + + // Result stream will be a ByteArrayOutputStream + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + StreamResult result = new StreamResult(baos); + // Do the transformation and serialization + transformerFactory.newTransformer().transform(source, 
result); + os.putNextEntry(entry); + IOUtils.write(baos.toByteArray(), os); + os.closeEntry(); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (TransformerException e) { + throw new RuntimeException(e); + } finally { + hasTransformedResource = false; + doc = null; + } + } + + @Override + public String getName() { + return getClass().getSimpleName(); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java index f3950c8646292..1787ebcccf3a9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalJavaRestTestPlugin.java @@ -8,7 +8,7 @@ package org.elasticsearch.gradle.internal.test.rest; -import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask; +import org.elasticsearch.gradle.internal.test.RestIntegTestTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -40,13 +40,7 @@ public void apply(Project project) { } // setup the javaRestTest task - // we use a StandloneRestIntegTestTask here so that the conventions of RestTestBasePlugin don't create a test cluster - TaskProvider testTask = registerTestTask( - project, - javaTestSourceSet, - SOURCE_SET_NAME, - StandaloneRestIntegTestTask.class - ); + TaskProvider testTask = registerTestTask(project, javaTestSourceSet, SOURCE_SET_NAME, RestIntegTestTask.class); project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(testTask)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java index 66d3507f7f9b3..ba40998e2b02a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/InternalYamlRestTestPlugin.java @@ -8,7 +8,7 @@ package org.elasticsearch.gradle.internal.test.rest; -import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask; +import org.elasticsearch.gradle.internal.test.RestIntegTestTask; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; @@ -36,12 +36,7 @@ public void apply(Project project) { SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); SourceSet yamlTestSourceSet = sourceSets.create(SOURCE_SET_NAME); - TaskProvider testTask = registerTestTask( - project, - yamlTestSourceSet, - SOURCE_SET_NAME, - StandaloneRestIntegTestTask.class - ); + TaskProvider testTask = registerTestTask(project, yamlTestSourceSet, SOURCE_SET_NAME, RestIntegTestTask.class); project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(testTask)); diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 57f4fc9a04ecd..beaf8723df4d5 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -6,376 +6,179 @@ * Side Public License, v 1. 
*/ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.HdfsUtils -import java.nio.file.Path -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { - description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' - classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' + description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.' + classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin' } versions << [ - 'hadoop': '3.3.3' + 'hadoop': '3.3.3' ] -final int minTestedHadoopVersion = 2; -final int maxTestedHadoopVersion = 3; - -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs" - configurations { - krb5Config - krb5Keytabs + hdfsFixture2 + hdfsFixture3 } -dependencies { - api project(path: 'hadoop-client-api', configuration: 'shadow') - if (isEclipse) { - /* - * Eclipse can't pick up the shadow dependency so we point it at *something* - * so it can compile things. 
- */ - api project(path: 'hadoop-client-api') - } - runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}" - implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}" - api "com.google.protobuf:protobuf-java:${versions.protobuf}" - api "commons-logging:commons-logging:${versions.commonslogging}" - api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" - api 'commons-cli:commons-cli:1.2' - api "commons-codec:commons-codec:${versions.commonscodec}" - api 'commons-io:commons-io:2.8.0' - api 'org.apache.commons:commons-lang3:3.11' - api 'javax.servlet:javax.servlet-api:3.1.0' - api "org.slf4j:slf4j-api:${versions.slf4j}" - runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" - // runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") https://github.com/elastic/elasticsearch/issues/93714 - krb5Keytabs project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5KeytabsHdfsDir') - krb5Config project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5ConfHdfsFile') +dependencies { + api project(path: 'hadoop-client-api', configuration: 'shadow') + if (isEclipse) { + /* + * Eclipse can't pick up the shadow dependency so we point it at *something* + * so it can compile things. 
+ */ + api project(path: 'hadoop-client-api') + } + runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}" + implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}" + api "com.google.protobuf:protobuf-java:${versions.protobuf}" + api "commons-logging:commons-logging:${versions.commonslogging}" + api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + api 'commons-cli:commons-cli:1.2' + api "commons-codec:commons-codec:${versions.commonscodec}" + api 'commons-io:commons-io:2.8.0' + api 'org.apache.commons:commons-lang3:3.11' + api 'javax.servlet:javax.servlet-api:3.1.0' + api "org.slf4j:slf4j-api:${versions.slf4j}" + runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" + // https://github.com/elastic/elasticsearch/issues/93714 + // runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") + + testImplementation(project(':test:fixtures:hdfs-fixture')) + javaRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) + + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + javaRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + + yamlRestTestCompileOnly(project(':test:fixtures:hdfs-fixture')) + yamlRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + yamlRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + yamlRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + yamlRestTestRuntimeOnly "commons-cli:commons-cli:1.2" + yamlRestTestRuntimeOnly "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + + hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2') + hdfsFixture3 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadow') } restResources { - restApi { - include '_common', 'cluster', 'nodes', 'indices', 'index', 
'snapshot' - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } + restApi { + include '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot' + } } tasks.named("dependencyLicenses").configure { - mapping from: /hadoop-.*/, to: 'hadoop' + mapping from: /hadoop-.*/, to: 'hadoop' } -// TODO work that into the java rest test plugin when combined with java plugin -sourceSets { - javaRestTest { - compileClasspath = compileClasspath + main.compileClasspath - runtimeClasspath = runtimeClasspath + main.runtimeClasspath + files("src/main/plugin-metadata") - } +tasks.withType(RestIntegTestTask).configureEach { + usesDefaultDistribution() + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } tasks.named('javaRestTest').configure { - enabled = false + classpath = sourceSets.javaRestTest.runtimeClasspath + configurations.hdfsFixture3 } -tasks.named('yamlRestTest').configure { - enabled = false +tasks.register("javaRestTestHdfs2", RestIntegTestTask) { + description = "Runs rest tests against an elasticsearch cluster with HDFS version 2" + testClassesDirs = sourceSets.javaRestTest.output.classesDirs + classpath = sourceSets.javaRestTest.runtimeClasspath + configurations.hdfsFixture2 } -String realm = "BUILD.ELASTIC.CO" -String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs") - -// Determine HDFS Fixture compatibility for the current build environment. 
-ext.fixtureSupported = project.provider(() -> HdfsUtils.isHdfsFixtureSupported(project)) - -for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoopVersion; hadoopVersion++) { - final int hadoopVer = hadoopVersion - - configurations.create("hdfs" + hadoopVersion + "Fixture") - dependencies.add("hdfs" + hadoopVersion + "Fixture", project(':test:fixtures:hdfs' + hadoopVersion + '-fixture')) - - for (String fixtureName : ['hdfs' + hadoopVersion + 'Fixture', 'haHdfs' + hadoopVersion + 'Fixture', 'secureHdfs' + hadoopVersion + 'Fixture', 'secureHaHdfs' + hadoopVersion + 'Fixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - executable = "${BuildParams.runtimeJavaHome}/bin/java" - dependsOn project.configurations.getByName("hdfs" + hadoopVer + "Fixture"), project.configurations.krb5Config, project.configurations.krb5Keytabs - env 'CLASSPATH', "${-> project.configurations.getByName("hdfs" + hadoopVer + "Fixture").asPath}" - - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.startsWith('secure')) { - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${project.configurations.krb5Config.getSingleFile().getPath()}") - miniHDFSArgs.add("-Dhdfs.config.port=" + getSecureNamenodePortForVersion(hadoopVer)) - } else { - miniHDFSArgs.add("-Dhdfs.config.port=" + getNonSecureNamenodePortForVersion(hadoopVer)) - } - // If it's an HA fixture, set a nameservice to use in the JVM options - if (name.startsWith('haHdfs') || name.startsWith('secureHaHdfs')) { - 
miniHDFSArgs.add("-Dha-nameservice=ha-hdfs") - } - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - - // If it's a secure fixture, then set the principal name and keytab locations to use for auth. - if (name.startsWith('secure')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(new File(project.configurations.krb5Keytabs.singleFile, "hdfs_hdfs.build.elastic.co.keytab").getPath()) - } - - args miniHDFSArgs.toArray() - } - } - - for (String integTestTaskName : ['javaRestTest' + hadoopVersion, 'javaRestTestSecure' + hadoopVersion]) { - tasks.register(integTestTaskName, RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer + "-HA" - - if (name.contains("Secure")) { - dependsOn "secureHaHdfs" + hadoopVer + "Fixture" - } - - File portsFileDir = file("${workingDir}/hdfs" + hadoopVer + "Fixture") - Path portsFile = name.contains("Secure") ? - buildDir.toPath() - .resolve("fixtures") - .resolve("secureHaHdfs" + hadoopVer + "Fixture") - .resolve("ports") : - buildDir.toPath() - .resolve("fixtures") - .resolve("haHdfs" + hadoopVer + "Fixture") - .resolve("ports") - nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports") - - // Copy ports file to separate location which is placed on the test classpath - doFirst { - mkdir(portsFileDir) - copy { - from portsFile - into portsFileDir - } - } - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - // Set the keytab files in the classpath so that we can access them from test code without the security manager - // freaking out. 
- classpath = sourceSets.javaRestTest.runtimeClasspath + - configurations.krb5Keytabs + - files(portsFileDir) - } - } - - for (String integTestTaskName : ['yamlRestTest' + hadoopVersion, 'yamlRestTestSecure' + hadoopVersion]) { - tasks.register(integTestTaskName, RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer - - if (name.contains("Secure")) { - dependsOn "secureHdfs" + hadoopVer + "Fixture" - } - - testClassesDirs = sourceSets.yamlRestTest.output.classesDirs - classpath = sourceSets.yamlRestTest.runtimeClasspath - } - } - - def processHadoopTestResources = tasks.register("processHadoop" + hadoopVer + "TestResources", Copy) - processHadoopTestResources.configure { - Map expansions = [ - 'hdfs_port' : getNonSecureNamenodePortForVersion(hadoopVer), - 'secure_hdfs_port': getSecureNamenodePortForVersion(hadoopVer), - ] - inputs.properties(expansions) - filter("tokens": expansions.collectEntries { k, v -> [k, v.toString()]}, ReplaceTokens.class) - it.into("build/resources/yamlRestTest/rest-api-spec/test") - it.into("hdfs_repository_" + hadoopVer) { - from "src/yamlRestTest/resources/rest-api-spec/test/hdfs_repository" - } - it.into("secure_hdfs_repository_" + hadoopVer) { - from "src/yamlRestTest/resources/rest-api-spec/test/secure_hdfs_repository" - } - } - tasks.named("processYamlRestTestResources").configure { - dependsOn(processHadoopTestResources) - } - - if (fixtureSupported.get()) { - // Check depends on the HA test. Already depends on the standard test. 
- tasks.named("check").configure { - dependsOn("javaRestTest" + hadoopVer) - } - - // Both standard and HA tests depend on their respective HDFS fixtures - tasks.named("yamlRestTest" + hadoopVer).configure { - dependsOn "hdfs" + hadoopVer + "Fixture" - // The normal test runner only runs the standard hdfs rest tests - systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer - } - tasks.named("javaRestTest" + hadoopVer).configure { - dependsOn "haHdfs" + hadoopVer + "Fixture" - } - } else { - // The normal integration test runner will just test that the plugin loads - tasks.named("yamlRestTest" + hadoopVer).configure { - systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer + '/10_basic' - } - // HA fixture is unsupported. Don't run them. - tasks.named("javaRestTestSecure" + hadoopVer).configure { - enabled = false - } - } - - tasks.named("check").configure { - dependsOn("yamlRestTest" + hadoopVer, "yamlRestTestSecure" + hadoopVer, "javaRestTestSecure" + hadoopVer) - } - - // Run just the secure hdfs rest test suite. 
- tasks.named("yamlRestTestSecure" + hadoopVer).configure { - systemProperty 'tests.rest.suite', 'secure_hdfs_repository_' + hadoopVer - } -} - - -def getSecureNamenodePortForVersion(hadoopVersion) { - return 10002 - (2 * hadoopVersion) +tasks.named('yamlRestTest').configure { + classpath = sourceSets.yamlRestTest.runtimeClasspath + configurations.hdfsFixture2 } -def getNonSecureNamenodePortForVersion(hadoopVersion) { - return 10003 - (2 * hadoopVersion) +tasks.register("yamlRestTestHdfs2", RestIntegTestTask) { + description = "Runs yaml rest tests against an elasticsearch cluster with HDFS version 2" + testClassesDirs = sourceSets.yamlRestTest.output.classesDirs + classpath = sourceSets.yamlRestTest.runtimeClasspath + configurations.hdfsFixture2 } -Set disabledIntegTestTaskNames = [] - -tasks.withType(RestIntegTestTask).configureEach { testTask -> - if (disabledIntegTestTaskNames.contains(name)) { - enabled = false; - } - BuildParams.withFipsEnabledOnly(testTask) - - if (name.contains("Secure")) { - if (disabledIntegTestTaskNames.contains(name) == false) { - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty "java.security.krb5.conf", "${project.configurations.krb5Config.getSingleFile().getPath()}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - new File(project.configurations.krb5Keytabs.singleFile, "hdfs_hdfs.build.elastic.co.keytab").getPath() - ) - } - } - - testClusters.matching { it.name == testTask.name }.configureEach { - if (testTask.name.contains("Secure")) { - systemProperty "java.security.krb5.conf", { configurations.krb5Config.singleFile.getPath() }, IGNORE_VALUE - extraConfigFile( - "repository-hdfs/krb5.keytab", - new File(project.configurations.krb5Keytabs.singleFile, "elasticsearch.keytab"), - IGNORE_VALUE - ) - } - } +tasks.named("check").configure { + 
dependsOn(tasks.withType(RestIntegTestTask)) } - tasks.named("thirdPartyAudit").configure { - ignoreMissingClasses() - ignoreViolations( - // internal java api: sun.misc.Unsafe - 'com.google.protobuf.MessageSchema', - 'com.google.protobuf.UnsafeUtil', - 'com.google.protobuf.UnsafeUtil$1', - 'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor', - 'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor', - 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'com.google.protobuf.UnsafeUtil$MemoryAccessor', - 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1', - 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe', - 
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField', - 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1', - 
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1', - 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', - 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', - 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', - 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', - 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', - 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', - 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', - 
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' - ) -} - -tasks.named('resolveAllDependencies') { - // This avoids spinning up the test fixture when downloading all dependencies - configs = project.configurations - [project.configurations.krb5Config] + ignoreMissingClasses() + ignoreViolations( + // internal java api: sun.misc.Unsafe + 'com.google.protobuf.MessageSchema', + 'com.google.protobuf.UnsafeUtil', + 'com.google.protobuf.UnsafeUtil$1', + 'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor', + 'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor', + 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor', + 'com.google.protobuf.UnsafeUtil$MemoryAccessor', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell', + 'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 
'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField', + 'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor', + 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64', + 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1', + 'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1', + 'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell', + 
'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor' + ) } diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java new file mode 100644 index 0000000000000..d14cff30caef3 --- /dev/null +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/AbstractHaHdfsFailoverTestSuiteIT.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Assert; + +import java.io.IOException; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +abstract class AbstractHaHdfsFailoverTestSuiteIT extends ESRestTestCase { + + abstract HdfsFixture getHdfsFixture(); + + String securityCredentials() { + return ""; + } + + public void testHAFailoverWithRepository() throws Exception { + getHdfsFixture().setupHA(); + + RestClient client = client(); + + createRepository(client); + + // Get repository + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + + // Failover the namenode to the second. 
+ getHdfsFixture().failoverHDFS("nn1", "nn2"); + safeSleep(2000); + // Get repository again + response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + } + + private void createRepository(RestClient client) throws IOException { + Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); + request.setJsonEntity(Strings.format(""" + { + "type": "hdfs", + "settings": { + "uri": "hdfs://ha-hdfs/", + "path": "/user/elasticsearch/existing/readonly-repository", + "readonly": "true", + %s + "conf.dfs.nameservices": "ha-hdfs", + "conf.dfs.ha.namenodes.ha-hdfs": "nn1,nn2", + "conf.dfs.namenode.rpc-address.ha-hdfs.nn1": "localhost:%s", + "conf.dfs.namenode.rpc-address.ha-hdfs.nn2": "localhost:%s", + "conf.dfs.client.failover.proxy.provider.ha-hdfs":\ + "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" + } + }""", securityCredentials(), getHdfsFixture().getPort(0), getHdfsFixture().getPort(1))); + Response response = client.performRequest(request); + Assert.assertEquals(200, response.getStatusLine().getStatusCode()); + } + +} diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java index cb8c4d65d88d6..7bd15ad64582f 100644 --- a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java @@ -8,271 +8,41 @@ package org.elasticsearch.repositories.hdfs; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ha.BadFencingConfigurationException; -import org.apache.hadoop.ha.HAServiceProtocol; -import org.apache.hadoop.ha.HAServiceTarget; -import 
org.apache.hadoop.ha.NodeFencer; -import org.apache.hadoop.ha.ZKFCProtocol; -import org.apache.hadoop.ha.protocolPB.HAServiceProtocolClientSideTranslatorPB; -import org.apache.hadoop.hdfs.tools.DFSHAAdmin; -import org.apache.hadoop.security.SecurityUtil; -import org.apache.hadoop.security.UserGroupInformation; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.Assert; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.AccessController; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.List; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; /** * Integration test that runs against an HA-Enabled HDFS instance */ -public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) +public class HaHdfsFailoverTestSuiteIT extends AbstractHaHdfsFailoverTestSuiteIT { - public void testHAFailoverWithRepository() throws Exception { - RestClient client = client(); + public static HdfsFixture hdfsFixture = new HdfsFixture().withHAService("ha-hdfs"); - String esKerberosPrincipal = System.getProperty("test.krb5.principal.es"); - String hdfsKerberosPrincipal = System.getProperty("test.krb5.principal.hdfs"); - String 
kerberosKeytabLocation = System.getProperty("test.krb5.keytab.hdfs"); - String ports = System.getProperty("test.hdfs-fixture.ports"); - String nn1Port = "10001"; - String nn2Port = "10002"; - if (ports.length() > 0) { - final Path path = PathUtils.get(ports); - final List lines = AccessController.doPrivileged((PrivilegedExceptionAction>) () -> { - return Files.readAllLines(path); - }); - nn1Port = lines.get(0); - nn2Port = lines.get(1); - } - boolean securityEnabled = hdfsKerberosPrincipal != null; + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); - Configuration hdfsConfiguration = new Configuration(); - hdfsConfiguration.set("dfs.nameservices", "ha-hdfs"); - hdfsConfiguration.set("dfs.ha.namenodes.ha-hdfs", "nn1,nn2"); - hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn1", "localhost:" + nn1Port); - hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn2", "localhost:" + nn2Port); - hdfsConfiguration.set( - "dfs.client.failover.proxy.provider.ha-hdfs", - "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" - ); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); - AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - if (securityEnabled) { - // ensure that keytab exists - Path kt = PathUtils.get(kerberosKeytabLocation); - if (Files.exists(kt) == false) { - throw new IllegalStateException("Could not locate keytab at " + kerberosKeytabLocation); - } - if (Files.isReadable(kt) != true) { - throw new IllegalStateException("Could not read keytab at " + kerberosKeytabLocation); - } - logger.info("Keytab Length: " + Files.readAllBytes(kt).length); - - // set principal names - hdfsConfiguration.set("dfs.namenode.kerberos.principal", hdfsKerberosPrincipal); - 
hdfsConfiguration.set("dfs.datanode.kerberos.principal", hdfsKerberosPrincipal); - hdfsConfiguration.set("dfs.data.transfer.protection", "authentication"); - - SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, hdfsConfiguration); - UserGroupInformation.setConfiguration(hdfsConfiguration); - UserGroupInformation.loginUserFromKeytab(hdfsKerberosPrincipal, kerberosKeytabLocation); - } else { - SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE, hdfsConfiguration); - UserGroupInformation.setConfiguration(hdfsConfiguration); - UserGroupInformation.getCurrentUser(); - } - return null; - }); - - // Create repository - { - Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); - request.setJsonEntity(Strings.format(""" - { - "type": "hdfs", - "settings": { - "uri": "hdfs://ha-hdfs/", - "path": "/user/elasticsearch/existing/readonly-repository", - "readonly": "true", - %s - "conf.dfs.nameservices": "ha-hdfs", - "conf.dfs.ha.namenodes.ha-hdfs": "nn1,nn2", - "conf.dfs.namenode.rpc-address.ha-hdfs.nn1": "localhost:%s", - "conf.dfs.namenode.rpc-address.ha-hdfs.nn2": "localhost:%s", - "conf.dfs.client.failover.proxy.provider.ha-hdfs": \ - "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" - } - }""", securityCredentials(securityEnabled, esKerberosPrincipal), nn1Port, nn2Port)); - Response response = client.performRequest(request); - - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - - // Get repository - { - Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - - // Failover the namenode to the second. 
- failoverHDFS("nn1", "nn2", hdfsConfiguration); - - // Get repository again - { - Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); - Assert.assertEquals(200, response.getStatusLine().getStatusCode()); - } - } - - private String securityCredentials(boolean securityEnabled, String kerberosPrincipal) { - if (securityEnabled) { - return String.format(java.util.Locale.ROOT, """ - "security.principal": "%s","conf.dfs.data.transfer.protection": "authentication",""", kerberosPrincipal); - } else { - return ""; - } - } - - /** - * Wraps an HAServiceTarget, keeping track of any HAServiceProtocol proxies it generates in order - * to close them at the end of the test lifecycle. - */ - private static class CloseableHAServiceTarget extends HAServiceTarget { - private final HAServiceTarget delegate; - private final List protocolsToClose = new ArrayList<>(); - - CloseableHAServiceTarget(HAServiceTarget delegate) { - this.delegate = delegate; - } - - @Override - public InetSocketAddress getAddress() { - return delegate.getAddress(); - } - - @Override - public InetSocketAddress getHealthMonitorAddress() { - return delegate.getHealthMonitorAddress(); - } - - @Override - public InetSocketAddress getZKFCAddress() { - return delegate.getZKFCAddress(); - } - - @Override - public NodeFencer getFencer() { - return delegate.getFencer(); - } - - @Override - public void checkFencingConfigured() throws BadFencingConfigurationException { - delegate.checkFencingConfigured(); - } - - @Override - public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) throws IOException { - HAServiceProtocol proxy = delegate.getProxy(conf, timeoutMs); - protocolsToClose.add(proxy); - return proxy; - } - - @Override - public HAServiceProtocol getHealthMonitorProxy(Configuration conf, int timeoutMs) throws IOException { - return delegate.getHealthMonitorProxy(conf, timeoutMs); - } - - @Override - public ZKFCProtocol getZKFCProxy(Configuration conf, int 
timeoutMs) throws IOException { - return delegate.getZKFCProxy(conf, timeoutMs); - } - - @Override - public boolean isAutoFailoverEnabled() { - return delegate.isAutoFailoverEnabled(); - } - - private void close() { - for (HAServiceProtocol protocol : protocolsToClose) { - if (protocol instanceof HAServiceProtocolClientSideTranslatorPB haServiceProtocolClientSideTranslatorPB) { - haServiceProtocolClientSideTranslatorPB.close(); - } - } - } - } - - /** - * The default HAAdmin tool does not throw exceptions on failures, and does not close any client connection - * resources when it concludes. This subclass overrides the tool to allow for exception throwing, and to - * keep track of and clean up connection resources. - */ - private static class CloseableHAAdmin extends DFSHAAdmin { - private final List serviceTargets = new ArrayList<>(); - - @Override - protected HAServiceTarget resolveTarget(String nnId) { - CloseableHAServiceTarget target = new CloseableHAServiceTarget(super.resolveTarget(nnId)); - serviceTargets.add(target); - return target; - } - - @Override - public int run(String[] argv) throws Exception { - return runCmd(argv); - } - - public int transitionToStandby(String namenodeID) throws Exception { - return run(new String[] { "-transitionToStandby", namenodeID }); - } - - public int transitionToActive(String namenodeID) throws Exception { - return run(new String[] { "-transitionToActive", namenodeID }); - } - - public void close() { - for (CloseableHAServiceTarget serviceTarget : serviceTargets) { - serviceTarget.close(); - } - } + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); } - /** - * Performs a two-phase leading namenode transition. - * @param from Namenode ID to transition to standby - * @param to Namenode ID to transition to active - * @param configuration Client configuration for HAAdmin tool - * @throws IOException In the event of a raised exception during namenode failover. 
- */ - private void failoverHDFS(String from, String to, Configuration configuration) throws IOException { - logger.info("Swapping active namenodes: [{}] to standby and [{}] to active", from, to); - try { - AccessController.doPrivileged((PrivilegedExceptionAction) () -> { - CloseableHAAdmin haAdmin = new CloseableHAAdmin(); - haAdmin.setConf(configuration); - try { - haAdmin.transitionToStandby(from); - haAdmin.transitionToActive(to); - } finally { - haAdmin.close(); - } - return null; - }); - } catch (PrivilegedActionException pae) { - throw new IOException("Unable to perform namenode failover", pae); - } + @Override + HdfsFixture getHdfsFixture() { + return hdfsFixture; } } diff --git a/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java new file mode 100644 index 0000000000000..8ba27f703c419 --- /dev/null +++ b/plugins/repository-hdfs/src/javaRestTest/java/org/elasticsearch/repositories/hdfs/SecureHaHdfsFailoverTestSuiteIT.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.hdfs; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +public class SecureHaHdfsFailoverTestSuiteIT extends AbstractHaHdfsFailoverTestSuiteIT { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withHAService("ha-hdfs") + .withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + @Override + HdfsFixture getHdfsFixture() { + return hdfsFixture; + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + protected String securityCredentials() { + return String.format(java.util.Locale.ROOT, """ + "security.principal": "%s","conf.dfs.data.transfer.protection": "authentication",""", krb5Fixture.getEsPrincipal()); + } + +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java 
b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index 592192f29c262..ee1e54e8a3356 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import org.hamcrest.CoreMatchers; import org.mockito.AdditionalMatchers; import org.mockito.Mockito; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java index fed4411f68768..a52724496289a 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; import java.util.Collections; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java index cd38cc04e6b31..a6d2bdcf8a1d4 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java @@ -15,6 
+15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.AbstractThirdPartyRepositoryTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index 313dcdd6623c4..0e2ec25b6cfaa 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; import java.util.Collection; diff --git a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java index bdc6368bb5719..a0a4d9379bc78 100644 --- a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java +++ b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java @@ -5,22 +5,52 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ + package org.elasticsearch.repositories.hdfs; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Map; +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) public class RepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); + public RepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @ParametersFactory public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); + return createParameters(new String[] { "hdfs_repository" }, Map.of("hdfs_port", hdfsFixture.getPort())); } } diff --git 
a/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java new file mode 100644 index 0000000000000..45b992a35d731 --- /dev/null +++ b/plugins/repository-hdfs/src/yamlRestTest/java/org/elasticsearch/repositories/hdfs/SecureRepositoryHdfsClientYamlTestSuiteIT.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.repositories.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.Map; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class 
SecureRepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + public SecureRepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return createParameters(new String[] { "secure_hdfs_repository" }, Map.of("secure_hdfs_port", hdfsFixture.getPort())); + } +} diff --git a/settings.gradle b/settings.gradle index 97cce0a476d99..48e3794c9005d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -90,8 +90,7 @@ List projects = [ 'test:framework', 'test:fixtures:azure-fixture', 'test:fixtures:gcs-fixture', - 'test:fixtures:hdfs2-fixture', - 'test:fixtures:hdfs3-fixture', + 'test:fixtures:hdfs-fixture', 'test:fixtures:krb5kdc-fixture', 'test:fixtures:minio-fixture', 'test:fixtures:old-elasticsearch', diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle new file mode 100644 index 0000000000000..bd4acf4e51505 --- 
/dev/null +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +apply plugin: 'elasticsearch.java' +apply plugin: 'com.github.johnrengelman.shadow' + +import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar + +configurations { +// all { +// transitive = true +// } + hdfs2 + hdfs3 + consumable("shadowedHdfs2") +} + +dependencies { + compileOnly("org.apache.hadoop:hadoop-minicluster:2.8.5") + api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}") { + transitive false + } + compileOnly "junit:junit:${versions.junit}" + hdfs2 "org.apache.hadoop:hadoop-minicluster:2.8.5" + hdfs3 "org.apache.hadoop:hadoop-minicluster:3.3.1" + +} + +tasks.named("shadowJar").configure { + archiveClassifier.set("hdfs3") + // fix issues with signed jars + + relocate("org.apache.hadoop", "fixture.hdfs3.org.apache.hadoop") { + exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" + exclude "org.apache.hadoop.ipc.StandbyException" + } + configurations << project.configurations.hdfs3 +} + +def hdfs2Jar = tasks.register("hdfs2jar", ShadowJar) { + relocate("org.apache.hadoop", "fixture.hdfs2.org.apache.hadoop") { + exclude "org.apache.hadoop.hdfs.protocol.ClientProtocol" + exclude "org.apache.hadoop.ipc.StandbyException" + } + archiveClassifier.set("hdfs2") + from sourceSets.main.output + configurations << project.configurations.hdfs2 +} + +tasks.withType(ShadowJar) { + dependencies { +// exclude(dependency('commons-io:commons-io:2.8.0')) + exclude(dependency("com.carrotsearch.randomizedtesting:randomizedtesting-runner:.*")) + exclude(dependency("junit:junit:.*")) + 
exclude(dependency("org.slf4j:slf4j-api:.*")) + exclude(dependency("com.google.guava:guava:.*")) + exclude(dependency("org.apache.commons:commons-compress:.*")) + exclude(dependency("commons-logging:commons-logging:.*")) + exclude(dependency("commons-codec:commons-codec:.*")) + exclude(dependency("org.apache.httpcomponents:httpclient:.*")) + exclude(dependency("org.apache.httpcomponents:httpcore:.*")) + exclude(dependency("org.apache.logging.log4j:log4j-1.2-api:.*")) + exclude(dependency("log4j:log4j:.*")) + exclude(dependency("io.netty:.*:.*")) + exclude(dependency("com.nimbusds:nimbus-jose-jwt:.*")) + exclude(dependency("commons-cli:commons-cli:1.2")) + exclude(dependency("net.java.dev.jna:jna:.*")) + exclude(dependency("org.objenesis:objenesis:.*")) + exclude(dependency('com.fasterxml.jackson.core:.*:.*')) + } + + transform(org.elasticsearch.gradle.internal.shadow.XmlClassRelocationTransformer.class) { + resource = "core-default.xml" + enabled = true + } + + transform(org.elasticsearch.gradle.internal.shadow.XmlClassRelocationTransformer.class) { + resource = "hdfs-default.xml" + enabled = true + } +} + +artifacts { + shadowedHdfs2(hdfs2Jar) +} diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java similarity index 77% rename from plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java rename to test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java index c79418557da20..be63e22742ed5 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsClientThreadLeakFilter.java +++ b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsClientThreadLeakFilter.java @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -package org.elasticsearch.repositories.hdfs; +package org.elasticsearch.test.fixtures.hdfs; import com.carrotsearch.randomizedtesting.ThreadFilter; @@ -29,6 +29,11 @@ public final class HdfsClientThreadLeakFilter implements ThreadFilter { @Override public boolean reject(Thread t) { - return t.getName().equals(OFFENDING_THREAD_NAME); + return t.getName().contains(OFFENDING_THREAD_NAME) + || t.getName().startsWith("LeaseRenewer") + || t.getName().startsWith("SSL Certificates Store Monitor") // hadoop 3 brings that in + || t.getName().startsWith("GcTimeMonitor") // hadoop 3 + || t.getName().startsWith("Command processor") // hadoop 3 + || t.getName().startsWith("ForkJoinPool-"); // hadoop 3 } } diff --git a/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java new file mode 100644 index 0000000000000..18d406e2a97a0 --- /dev/null +++ b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java @@ -0,0 +1,438 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.fixtures.hdfs; + +import org.apache.commons.io.FileUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclEntryType; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.ha.BadFencingConfigurationException; +import org.apache.hadoop.ha.HAServiceProtocol; +import org.apache.hadoop.ha.HAServiceTarget; +import org.apache.hadoop.ha.NodeFencer; +import org.apache.hadoop.ha.ZKFCProtocol; +import org.apache.hadoop.ha.protocolPB.HAServiceProtocolClientSideTranslatorPB; +import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.hdfs.MiniDFSNNTopology; +import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; +import org.apache.hadoop.hdfs.tools.DFSHAAdmin; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.junit.Assume; +import org.junit.rules.ExternalResource; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public class HdfsFixture extends ExternalResource { + + private static final Logger LOGGER = LoggerFactory.getLogger(HdfsFixture.class); + + private TemporaryFolder 
temporaryFolder = new TemporaryFolder(); + private MiniDFSCluster dfs; + private String haNameService; + private Supplier principalConfig = null; + private Supplier keytab = null; + private Configuration cfg; + + private Configuration haConfiguration; + private int explicitPort = findAvailablePort(); + + public HdfsFixture withHAService(String haNameService) { + this.haNameService = haNameService; + return this; + } + + public HdfsFixture withKerberos(Supplier principalConfig, Supplier keytabFile) { + this.principalConfig = principalConfig; + this.keytab = keytabFile; + return this; + } + + @Override + protected void before() throws Throwable { + temporaryFolder.create(); + assumeHdfsAvailable(); + startMinHdfs(); + } + + private void assumeHdfsAvailable() { + boolean fixtureSupported = false; + if (isWindows()) { + // hdfs fixture will not start without hadoop native libraries on windows + String nativePath = System.getenv("HADOOP_HOME"); + if (nativePath != null) { + java.nio.file.Path path = Paths.get(nativePath); + if (Files.isDirectory(path) + && Files.exists(path.resolve("bin").resolve("winutils.exe")) + && Files.exists(path.resolve("bin").resolve("hadoop.dll")) + && Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { + fixtureSupported = true; + } else { + throw new IllegalStateException( + "HADOOP_HOME: " + path + " is invalid, does not contain hadoop native libraries in " + nativePath + "/bin" + ); + } + } + } else { + fixtureSupported = true; + } + + boolean nonLegalegalPath = temporaryFolder.getRoot().getAbsolutePath().contains(" "); + if (nonLegalegalPath) { + fixtureSupported = false; + } + + Assume.assumeTrue("HDFS Fixture is not supported", fixtureSupported); + } + + private boolean isWindows() { + return System.getProperty("os.name").toLowerCase().startsWith("windows"); + } + + /** + * Performs a two-phase leading namenode transition. 
+ * @param from Namenode ID to transition to standby + * @param to Namenode ID to transition to active + * @throws IOException In the event of a raised exception during namenode failover. + */ + public void failoverHDFS(String from, String to) throws IOException { + assert isHA() && haConfiguration != null : "HA Configuration must be set up before performing failover"; + LOGGER.info("Swapping active namenodes: [{}] to standby and [{}] to active", from, to); + try { + AccessController.doPrivileged((PrivilegedExceptionAction) () -> { + CloseableHAAdmin haAdmin = new CloseableHAAdmin(); + haAdmin.setConf(haConfiguration); + try { + haAdmin.transitionToStandby(from); + haAdmin.transitionToActive(to); + } finally { + haAdmin.close(); + } + return null; + }); + } catch (PrivilegedActionException pae) { + throw new IOException("Unable to perform namenode failover", pae); + } + } + + public void setupHA() throws IOException { + assert isHA() : "HA Name Service must be set up before setting up HA"; + haConfiguration = new Configuration(); + haConfiguration.set("dfs.nameservices", haNameService); + haConfiguration.set("dfs.ha.namenodes.ha-hdfs", "nn1,nn2"); + haConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn1", "localhost:" + getPort(0)); + haConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn2", "localhost:" + (getPort(1))); + haConfiguration.set( + "dfs.client.failover.proxy.provider.ha-hdfs", + "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" + ); + + if (isSecure()) { + // ensure that keytab exists + Path kt = this.keytab.get(); + if (Files.exists(kt) == false) { + throw new IllegalStateException("Could not locate keytab at " + keytab.get()); + } + if (Files.isReadable(kt) != true) { + throw new IllegalStateException("Could not read keytab at " + keytab.get()); + } + LOGGER.info("Keytab Length: " + Files.readAllBytes(kt).length); + + // set principal names + String hdfsKerberosPrincipal = principalConfig.get(); + 
haConfiguration.set("dfs.namenode.kerberos.principal", hdfsKerberosPrincipal); + haConfiguration.set("dfs.datanode.kerberos.principal", hdfsKerberosPrincipal); + haConfiguration.set("dfs.data.transfer.protection", "authentication"); + + SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, haConfiguration); + UserGroupInformation.setConfiguration(haConfiguration); + UserGroupInformation.loginUserFromKeytab(hdfsKerberosPrincipal, keytab.get().toString()); + } else { + SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE, haConfiguration); + UserGroupInformation.setConfiguration(haConfiguration); + UserGroupInformation.getCurrentUser(); + } + } + + private void startMinHdfs() throws Exception { + Path baseDir = temporaryFolder.newFolder("baseDir").toPath(); + if (System.getenv("HADOOP_HOME") == null) { + Path hadoopHome = baseDir.resolve("hadoop-home"); + Files.createDirectories(hadoopHome); + System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); + } + // hdfs-data/, where any data is going + Path hdfsHome = baseDir.resolve("hdfs-data"); + new File(hdfsHome.toFile(), "data").mkdirs(); + // configure cluster + cfg = new Configuration(); + cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); + cfg.set("hadoop.security.group.mapping", "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback"); + + // optionally configure security + if (isSecure()) { + String kerberosPrincipal = principalConfig.get(); + String keytabFilePath = keytab.get().toString(); + cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); + cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); + cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + 
cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); + cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFilePath); + cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFilePath); + cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); + cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); + cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); + cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, "true"); + } + refreshKrb5Config(); + UserGroupInformation.setConfiguration(cfg); + + MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); + // if(isSecure()) { + builder.nameNodePort(explicitPort); + // } else { + // builder.nameNodePort(explicitPort); + // } + if (isHA()) { + MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); + MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); + MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); + MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); + builder.nnTopology(namenodeTopology); + } + dfs = builder.build(); + // Configure contents of the filesystem + org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); + FileSystem fs; + if (isHA()) { + dfs.transitionToActive(0); + fs = HATestUtil.configureFailoverFs(dfs, cfg); + } else { + fs = dfs.getFileSystem(0); + } + + try { + // Set the elasticsearch user directory up + fs.mkdirs(esUserPath); + if (UserGroupInformation.isSecurityEnabled()) { + List acls = new ArrayList<>(); + acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); + fs.modifyAclEntries(esUserPath, acls); + } + + // Install a pre-existing repository into HDFS + String directoryName = "readonly-repository"; + String archiveName = directoryName + ".tar.gz"; + URL 
readOnlyRepositoryArchiveURL = getClass().getClassLoader().getResource(archiveName); + if (readOnlyRepositoryArchiveURL != null) { + Path tempDirectory = Files.createTempDirectory(getClass().getName()); + File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); + FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); + FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); + + fs.copyFromLocalFile( + true, + true, + new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), + esUserPath.suffix("/existing/" + directoryName) + ); + + FileUtils.deleteDirectory(tempDirectory.toFile()); + } + } finally { + fs.close(); + } + } + + private boolean isSecure() { + return keytab != null && principalConfig != null; + } + + @Override + protected void after() { + if (dfs != null) { + try { + if (isHA()) { + dfs.getFileSystem(0).close(); + dfs.getFileSystem(1).close(); + } else { + dfs.getFileSystem().close(); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + dfs.close(); + } + temporaryFolder.delete(); + } + + private boolean isHA() { + return haNameService != null; + } + + public int getPort() { + return dfs == null ? explicitPort : dfs.getNameNodePort(0); + } + + // fix port handling to allow parallel hdfs fixture runs + public int getPort(int i) { + return dfs.getNameNodePort(i); + } + + /** + * Wraps an HAServiceTarget, keeping track of any HAServiceProtocol proxies it generates in order + * to close them at the end of the test lifecycle. 
+ */ + protected static class CloseableHAServiceTarget extends HAServiceTarget { + private final HAServiceTarget delegate; + private final List protocolsToClose = new ArrayList<>(); + + CloseableHAServiceTarget(HAServiceTarget delegate) { + this.delegate = delegate; + } + + @Override + public InetSocketAddress getAddress() { + return delegate.getAddress(); + } + + @Override + public InetSocketAddress getHealthMonitorAddress() { + return delegate.getHealthMonitorAddress(); + } + + @Override + public InetSocketAddress getZKFCAddress() { + return delegate.getZKFCAddress(); + } + + @Override + public NodeFencer getFencer() { + return delegate.getFencer(); + } + + @Override + public void checkFencingConfigured() throws BadFencingConfigurationException { + delegate.checkFencingConfigured(); + } + + @Override + public HAServiceProtocol getProxy(Configuration conf, int timeoutMs) throws IOException { + HAServiceProtocol proxy = delegate.getProxy(conf, timeoutMs); + protocolsToClose.add(proxy); + return proxy; + } + + @Override + public HAServiceProtocol getHealthMonitorProxy(Configuration conf, int timeoutMs) throws IOException { + return delegate.getHealthMonitorProxy(conf, timeoutMs); + } + + @Override + public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs) throws IOException { + return delegate.getZKFCProxy(conf, timeoutMs); + } + + @Override + public boolean isAutoFailoverEnabled() { + return delegate.isAutoFailoverEnabled(); + } + + private void close() { + for (HAServiceProtocol protocol : protocolsToClose) { + if (protocol instanceof HAServiceProtocolClientSideTranslatorPB haServiceProtocolClientSideTranslatorPB) { + haServiceProtocolClientSideTranslatorPB.close(); + } + } + } + } + + /** + * The default HAAdmin tool does not throw exceptions on failures, and does not close any client connection + * resources when it concludes. This subclass overrides the tool to allow for exception throwing, and to + * keep track of and clean up connection resources. 
+ */ + protected static class CloseableHAAdmin extends DFSHAAdmin { + private final List serviceTargets = new ArrayList<>(); + + @Override + protected HAServiceTarget resolveTarget(String nnId) { + CloseableHAServiceTarget target = new CloseableHAServiceTarget(super.resolveTarget(nnId)); + serviceTargets.add(target); + return target; + } + + @Override + public int run(String[] argv) throws Exception { + return runCmd(argv); + } + + public int transitionToStandby(String namenodeID) throws Exception { + return run(new String[] { "-transitionToStandby", namenodeID }); + } + + public int transitionToActive(String namenodeID) throws Exception { + return run(new String[] { "-transitionToActive", namenodeID }); + } + + public void close() { + for (CloseableHAServiceTarget serviceTarget : serviceTargets) { + serviceTarget.close(); + } + } + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static void refreshKrb5Config() throws ClassNotFoundException, NoSuchMethodException, IllegalArgumentException, + IllegalAccessException, InvocationTargetException, InvocationTargetException { + Class classRef; + if (System.getProperty("java.vendor").contains("IBM")) { + classRef = Class.forName("com.ibm.security.krb5.internal.Config"); + } else { + classRef = Class.forName("sun.security.krb5.Config"); + } + + Method refreshMethod = classRef.getMethod("refresh"); + refreshMethod.invoke(classRef); + } + + private static int findAvailablePort() { + try (ServerSocket socket = new ServerSocket(0)) { + return socket.getLocalPort(); + } catch (Exception ex) { + LOGGER.error("Failed to find available port", ex); + } + return -1; + } + +} diff --git a/test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz similarity index 100% rename from test/fixtures/hdfs2-fixture/src/main/resources/readonly-repository.tar.gz rename to 
test/fixtures/hdfs-fixture/src/main/resources/readonly-repository.tar.gz diff --git a/test/fixtures/hdfs2-fixture/build.gradle b/test/fixtures/hdfs2-fixture/build.gradle deleted file mode 100644 index 43d14a38c5e3e..0000000000000 --- a/test/fixtures/hdfs2-fixture/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -apply plugin: 'elasticsearch.java' - -dependencies { - api "org.apache.hadoop:hadoop-minicluster:2.8.5" -} diff --git a/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java deleted file mode 100644 index ee993fec74eb4..0000000000000 --- a/test/fixtures/hdfs2-fixture/src/main/java/hdfs/MiniHDFS.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package hdfs; - -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.permission.AclEntry; -import org.apache.hadoop.fs.permission.AclEntryType; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.MiniDFSNNTopology; -import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.File; -import java.lang.management.ManagementFactory; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * MiniHDFS test fixture. There is a CLI tool, but here we can - * easily properly setup logging, avoid parsing JSON, etc. 
- */ -public class MiniHDFS { - - private static String PORT_FILE_NAME = "ports"; - private static String PID_FILE_NAME = "pid"; - - public static void main(String[] args) throws Exception { - if (args.length != 1 && args.length != 3) { - throw new IllegalArgumentException( - "Expected: MiniHDFS [ ], got: " + Arrays.toString(args) - ); - } - boolean secure = args.length == 3; - - // configure Paths - Path baseDir = Paths.get(args[0]); - // hadoop-home/, so logs will not complain - if (System.getenv("HADOOP_HOME") == null) { - Path hadoopHome = baseDir.resolve("hadoop-home"); - Files.createDirectories(hadoopHome); - System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); - } - // hdfs-data/, where any data is going - Path hdfsHome = baseDir.resolve("hdfs-data"); - - // configure cluster - Configuration cfg = new Configuration(); - cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); - // lower default permission: TODO: needed? - cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); - - // optionally configure security - if (secure) { - String kerberosPrincipal = args[1]; - String keytabFile = args[2]; - - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); - cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, 
"true"); - } - - UserGroupInformation.setConfiguration(cfg); - - MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); - String explicitPort = System.getProperty("hdfs.config.port"); - if (explicitPort != null) { - builder.nameNodePort(Integer.parseInt(explicitPort)); - } else { - if (secure) { - builder.nameNodePort(9998); - } else { - builder.nameNodePort(9999); - } - } - - // Configure HA mode - String haNameService = System.getProperty("ha-nameservice"); - boolean haEnabled = haNameService != null; - if (haEnabled) { - MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); - MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); - MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); - MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); - builder.nnTopology(namenodeTopology); - } - - MiniDFSCluster dfs = builder.build(); - - // Configure contents of the filesystem - org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); - - FileSystem fs; - if (haEnabled) { - dfs.transitionToActive(0); - fs = HATestUtil.configureFailoverFs(dfs, cfg); - } else { - fs = dfs.getFileSystem(); - } - - try { - // Set the elasticsearch user directory up - fs.mkdirs(esUserPath); - if (UserGroupInformation.isSecurityEnabled()) { - List acls = new ArrayList<>(); - acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); - fs.modifyAclEntries(esUserPath, acls); - } - - // Install a pre-existing repository into HDFS - String directoryName = "readonly-repository"; - String archiveName = directoryName + ".tar.gz"; - URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName); - if (readOnlyRepositoryArchiveURL != null) { - Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName()); - File 
readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); - FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); - FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); - - fs.copyFromLocalFile( - true, - true, - new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), - esUserPath.suffix("/existing/" + directoryName) - ); - - FileUtils.deleteDirectory(tempDirectory.toFile()); - } - } finally { - fs.close(); - } - - // write our PID file - Path tmp = Files.createTempFile(baseDir, null, null); - String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; - Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - - // write our port file - String portFileContent = Integer.toString(dfs.getNameNodePort(0)); - if (haEnabled) { - portFileContent = portFileContent + "\n" + Integer.toString(dfs.getNameNodePort(1)); - } - tmp = Files.createTempFile(baseDir, null, null); - Files.write(tmp, portFileContent.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - } - -} diff --git a/test/fixtures/hdfs3-fixture/build.gradle b/test/fixtures/hdfs3-fixture/build.gradle deleted file mode 100644 index 872ab2efd42ab..0000000000000 --- a/test/fixtures/hdfs3-fixture/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -apply plugin: 'elasticsearch.java' - -dependencies { - api "org.apache.hadoop:hadoop-minicluster:3.3.1" -} diff --git a/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java deleted file mode 100644 index 0a26f5d82ac17..0000000000000 --- a/test/fixtures/hdfs3-fixture/src/main/java/hdfs/MiniHDFS.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package hdfs; - -import org.apache.commons.io.FileUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.permission.AclEntry; -import org.apache.hadoop.fs.permission.AclEntryType; -import org.apache.hadoop.fs.permission.FsAction; -import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.apache.hadoop.hdfs.MiniDFSNNTopology; -import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.File; -import java.lang.management.ManagementFactory; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * MiniHDFS test fixture. There is a CLI tool, but here we can - * easily properly setup logging, avoid parsing JSON, etc. 
- */ -public class MiniHDFS { - - private static String PORT_FILE_NAME = "ports"; - private static String PID_FILE_NAME = "pid"; - - public static void main(String[] args) throws Exception { - if (args.length != 1 && args.length != 3) { - throw new IllegalArgumentException( - "Expected: MiniHDFS [ ], got: " + Arrays.toString(args) - ); - } - boolean secure = args.length == 3; - - // configure Paths - Path baseDir = Paths.get(args[0]); - // hadoop-home/, so logs will not complain - if (System.getenv("HADOOP_HOME") == null) { - Path hadoopHome = baseDir.resolve("hadoop-home"); - Files.createDirectories(hadoopHome); - System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); - } - // hdfs-data/, where any data is going - Path hdfsHome = baseDir.resolve("hdfs-data"); - - // configure cluster - Configuration cfg = new Configuration(); - cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); - // lower default permission: TODO: needed? - cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); - - // optionally configure security - if (secure) { - String kerberosPrincipal = args[1]; - String keytabFile = args[2]; - - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); - cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile); - cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); - cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_KEY, 
"true"); - cfg.set(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY, "AES/CTR/NoPadding"); - } - - UserGroupInformation.setConfiguration(cfg); - - MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); - String explicitPort = System.getProperty("hdfs.config.port"); - if (explicitPort != null) { - builder.nameNodePort(Integer.parseInt(explicitPort)); - } else { - if (secure) { - builder.nameNodePort(9998); - } else { - builder.nameNodePort(9999); - } - } - - // Configure HA mode - String haNameService = System.getProperty("ha-nameservice"); - boolean haEnabled = haNameService != null; - if (haEnabled) { - MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); - MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); - MiniDFSNNTopology.NSConf nameservice = new MiniDFSNNTopology.NSConf(haNameService).addNN(nn1).addNN(nn2); - MiniDFSNNTopology namenodeTopology = new MiniDFSNNTopology().addNameservice(nameservice); - builder.nnTopology(namenodeTopology); - } - - MiniDFSCluster dfs = builder.build(); - - // Configure contents of the filesystem - org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); - - FileSystem fs; - if (haEnabled) { - dfs.transitionToActive(0); - fs = HATestUtil.configureFailoverFs(dfs, cfg); - } else { - fs = dfs.getFileSystem(); - } - - try { - // Set the elasticsearch user directory up - fs.mkdirs(esUserPath); - if (UserGroupInformation.isSecurityEnabled()) { - List acls = new ArrayList<>(); - acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch").setPermission(FsAction.ALL).build()); - fs.modifyAclEntries(esUserPath, acls); - } - - // Install a pre-existing repository into HDFS - String directoryName = "readonly-repository"; - String archiveName = directoryName + ".tar.gz"; - URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName); - if (readOnlyRepositoryArchiveURL != 
null) { - Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName()); - File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); - FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); - FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); - - fs.copyFromLocalFile( - true, - true, - new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), - esUserPath.suffix("/existing/" + directoryName) - ); - - FileUtils.deleteDirectory(tempDirectory.toFile()); - } - } finally { - fs.close(); - } - - // write our PID file - Path tmp = Files.createTempFile(baseDir, null, null); - String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; - Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - - // write our port file - String portFileContent = Integer.toString(dfs.getNameNodePort(0)); - if (haEnabled) { - portFileContent = portFileContent + "\n" + Integer.toString(dfs.getNameNodePort(1)); - } - tmp = Files.createTempFile(baseDir, null, null); - Files.write(tmp, portFileContent.getBytes(StandardCharsets.UTF_8)); - Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); - } - -} diff --git a/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz b/test/fixtures/hdfs3-fixture/src/main/resources/readonly-repository.tar.gz deleted file mode 100644 index 2cdb6d77c07d0a06029521d60fdc966e92248d72..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1314 zcmV+-1>O1|iwFQ)qqIjP0S`X&6>Cgu-pu#=zCXvJBxxF2uE(NTlc~08Xdnk?@>NJB3M=Q zuNq0Z+=H672A4-jl4|Q;ka1D}I20B!*MB%1#%+P-J;5nlaQ&|anL)BFW7;e@@cL@d z;9>|4lm`_Y><&7Pn&e|?6cwk4u`qL2pO1@^%W^cX^bhg#Tr@Vn39rp|2l?Jn6O_-j z8cY(KxJU11A_|i_)$|yPjlC8Q%|xOoI-PcJKC*nRC1%&w*3Mf{k0~|nzgOc*Wp*>x zsAvBOqvQ53U@Rc8e*k#c)Ubcohc_(`{O|c+$^MZb2$27^f@=ELsZDyShvHry)Ei!- 
zp8vZEp*>Lxs&nYBAL`e!Q2^xU8t9 zxanZrk&+7&x9F?WPS5E0Z7t3EQn8ikZJX*@FL5Xw92|W6(ucp7*13QE%6I4Znm`Sz z*}t2Tx6=MaQ9NP)vIO?u3OxHS)nIKU``;>!6xQ8>aN@8WT=%TM`sT; zR+8<$$D9P8jisFO&{m45nq%=L_K&-p$U9SW=P1=jhtu-E>NR`gX<@_tKS2z+{U6Bx z9zJK#KWcObG_HRMi3s%H3eKSahRcI`@n7bf<5>I`F_J`x|E<7{|4;i3{}WZf`mqAw z`}Zx=u~j3j7>-^FzT51Wh6`7v!jq8^nL1I z!6%yI1%;VX`0mrx1Yv{d~0;C*RxOB$`T?TfXdMEmbF0PT?hw z&h~cq`T7dW83~1Gnwjw4j1}HOd-3RKR52$e;xRimsSW4S^NC?`acRGz09ttP&;4`M zb1lx~{}rVn)2wIzf+SSr|B{4du>V%z=l?XVQ^RKbf5*)~^Id=Z@Yz`Z8&|%(+Wpmd zJMQQxUHnA($5XaN45pcBuN&~}(1N+R6pP%N877N}nz9yorSI<2Gv$lY)Zk)uO{N`R z)ycBr7>uN-LLA%V)qD`*?)r#!g}@}s{H;VVgW(_uO(FP|5bMVc07--6cgy(lfAod*4;l3 z-aSm)JqNOzFLv services = ["peppa", "hdfs"] - -tasks.named("preProcessFixture").configure { - doLast { - // We need to create these up-front because if docker creates them they will be owned by root and we won't be - // able to clean them up - services.each { fixturesDir.dir("shared/${it}").get().getAsFile().mkdirs() } - } -} - -tasks.named("postProcessFixture").configure { task -> - inputs.dir(fixturesDir.dir('shared').get().getAsFile()) - services.each { service -> - File confTemplate = fixturesDir.file("shared/${service}/krb5.conf.template").get().asFile - File confFile = fixturesDir.file("shared/${service}/krb5.conf").get().asFile - outputs.file(confFile) - doLast { - assert confTemplate.exists() - String confContents = confTemplate.text - .replace("\${MAPPED_PORT}", "${ext."test.fixtures.${service}.udp.88"}") - confFile.text = confContents - } +dockerFixtures { + krb5dc { + dockerContext = projectDir + version = "1.0" + baseImages = ["ubuntu:14.04"] } } -project.ext.krb5Conf = { s -> file("$testFixturesDir/shared/${s}/krb5.conf") } -project.ext.krb5Keytabs = { s, fileName -> file("$testFixturesDir/shared/${s}/keytabs/${fileName}") } - configurations { + all { + transitive = false + } krb5ConfHdfsFile { canBeConsumed = true canBeResolved = false @@ -49,11 +34,24 @@ configurations { } } -artifacts { - 
krb5ConfHdfsFile(krb5Conf('hdfs')) { - builtBy("postProcessFixture") - } - krb5KeytabsHdfsDir(file("$testFixturesDir/shared/hdfs/keytabs/")) { - builtBy("postProcessFixture") - } +dependencies { + testImplementation project(':test:framework') + + api "junit:junit:${versions.junit}" + api project(':test:fixtures:testcontainer-utils') + api "org.testcontainers:testcontainers:${versions.testcontainer}" + implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + implementation "org.slf4j:slf4j-api:${versions.slf4j}" + implementation "com.github.docker-java:docker-java-api:${versions.dockerJava}" + implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" + + runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" + runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" + runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + + // ensure we have proper logging during when used in tests + runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" } diff --git a/test/fixtures/krb5kdc-fixture/docker-compose.yml b/test/fixtures/krb5kdc-fixture/docker-compose.yml deleted file mode 100644 index 9e2d67000532e..0000000000000 --- a/test/fixtures/krb5kdc-fixture/docker-compose.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: '3' -services: - peppa: - hostname: kerberos.build.elastic.co - build: - context: . - dockerfile: Dockerfile - extra_hosts: - - "kerberos.build.elastic.co:127.0.0.1" - command: "bash /fixture/src/main/resources/provision/peppa.sh" - volumes: - - ./testfixtures_shared/shared/peppa:/fixture/build - # containers have bad entropy so mount /dev/urandom. Less secure but this is a test fixture. 
- - /dev/urandom:/dev/random - ports: - - "4444" - - "88/udp" - hdfs: - hostname: kerberos.build.elastic.co - build: - context: . - dockerfile: Dockerfile - extra_hosts: - - "kerberos.build.elastic.co:127.0.0.1" - command: "bash /fixture/src/main/resources/provision/hdfs.sh" - volumes: - - ./testfixtures_shared/shared/hdfs:/fixture/build - # containers have bad entropy so mount /dev/urandom. Less secure but this is a test fixture. - - /dev/urandom:/dev/random - ports: - - "4444" - - "88/udp" diff --git a/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java b/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java new file mode 100644 index 0000000000000..fa75b57ea87a6 --- /dev/null +++ b/test/fixtures/krb5kdc-fixture/src/main/java/org/elasticsearch/test/fixtures/krb5kdc/Krb5kDcContainer.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.fixtures.krb5kdc; + +import com.github.dockerjava.api.model.ExposedPort; +import com.github.dockerjava.api.model.Ports; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.junit.rules.TemporaryFolder; +import org.testcontainers.containers.Network; +import org.testcontainers.images.RemoteDockerImage; +import org.testcontainers.shaded.org.apache.commons.io.IOUtils; +import org.testcontainers.utility.MountableFile; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public final class Krb5kDcContainer extends DockerEnvironmentAwareTestContainer { + public static final String DOCKER_BASE_IMAGE = "docker.elastic.co/elasticsearch-dev/krb5dc-fixture:1.0"; + private final TemporaryFolder temporaryFolder = new TemporaryFolder(); + private final ProvisioningId provisioningId; + private Path krb5ConfFile; + private Path keytabFile; + private Path esKeytabFile; + + public enum ProvisioningId { + HDFS( + "hdfs", + "/fixture/src/main/resources/provision/hdfs.sh", + "/fixture/build/keytabs/hdfs_hdfs.build.elastic.co.keytab", + "/fixture/build/keytabs/elasticsearch.keytab", + "hdfs/hdfs.build.elastic.co@BUILD.ELASTIC.CO" + ), + PEPPA( + "peppa", + "/fixture/src/main/resources/provision/peppa.sh", + "/fixture/build/keytabs/peppa.keytab", + "/fixture/build/keytabs/HTTP_localhost.keytab", + "peppa@BUILD.ELASTIC.CO" + ); + + private final String id; + private final String scriptPath; + private final String keytabPath; + public final String esKeytab; + private final String keytabPrincipal; + + ProvisioningId(String id, String scriptPath, String keytabPath, String esKeytab, String principal) { + this.id = id; + this.scriptPath = scriptPath; + this.keytabPath = keytabPath; + this.esKeytab = esKeytab; + this.keytabPrincipal = principal; + 
} + } + + public Krb5kDcContainer() { + this(ProvisioningId.HDFS); + } + + public Krb5kDcContainer(ProvisioningId provisioningId) { + super(new RemoteDockerImage(DOCKER_BASE_IMAGE)); + this.provisioningId = provisioningId; + withNetwork(Network.newNetwork()); + addExposedPorts(88, 4444); + withCreateContainerCmdModifier(cmd -> { + // Add previously exposed ports and UDP port + List exposedPorts = new ArrayList<>(); + for (ExposedPort p : cmd.getExposedPorts()) { + exposedPorts.add(p); + } + exposedPorts.add(ExposedPort.udp(88)); + cmd.withExposedPorts(exposedPorts); + + // Add previous port bindings and UDP port binding + Ports ports = cmd.getPortBindings(); + ports.bind(ExposedPort.udp(88), Ports.Binding.empty()); + cmd.withPortBindings(ports); + }); + withNetworkAliases("kerberos.build.elastic.co", "build.elastic.co"); + withCopyFileToContainer(MountableFile.forHostPath("/dev/urandom"), "/dev/random"); + withExtraHost("kerberos.build.elastic.co", "127.0.0.1"); + withCommand("bash", provisioningId.scriptPath); + } + + @Override + public void start() { + try { + temporaryFolder.create(); + } catch (IOException e) { + throw new RuntimeException(e); + } + super.start(); + System.setProperty("java.security.krb5.conf", getConfPath().toString()); + } + + @Override + public void stop() { + super.stop(); + System.clearProperty("java.security.krb5.conf"); + temporaryFolder.delete(); + } + + @SuppressWarnings("all") + public String getConf() { + var bindings = Arrays.asList(getCurrentContainerInfo().getNetworkSettings().getPorts().getBindings().get(ExposedPort.udp(88))) + .stream() + .findFirst(); + String hostPortSpec = bindings.get().getHostPortSpec(); + String s = copyFileFromContainer("/fixture/build/krb5.conf.template", i -> IOUtils.toString(i, StandardCharsets.UTF_8)); + return s.replace("${MAPPED_PORT}", hostPortSpec); + } + + public Path getKeytab() { + if (keytabFile != null) { + return keytabFile; + } + try { + String keytabPath = provisioningId.keytabPath; + 
keytabFile = temporaryFolder.newFile(provisioningId.id + ".keytab").toPath(); + copyFileFromContainer(keytabPath, keytabFile.toAbsolutePath().toString()); + return keytabFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Path getEsKeytab() { + if (esKeytabFile != null) { + return esKeytabFile; + } + try { + esKeytabFile = temporaryFolder.newFile("elasticsearch.keytab").toPath(); + copyFileFromContainer(provisioningId.esKeytab, esKeytabFile.toAbsolutePath().toString()); + return esKeytabFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Path getConfPath() { + if (krb5ConfFile != null) { + return krb5ConfFile; + } + try { + krb5ConfFile = temporaryFolder.newFile("krb5.conf").toPath(); + Files.write(krb5ConfFile, getConf().getBytes(StandardCharsets.UTF_8)); + return krb5ConfFile; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public String getPrincipal() { + return provisioningId.keytabPrincipal; + } + + public String getEsPrincipal() { + return "elasticsearch@BUILD.ELASTIC.CO"; + } +} diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh index ef5bba076444c..de08a52df3306 100644 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/hdfs.sh @@ -8,4 +8,4 @@ addprinc.sh "hdfs/hdfs.build.elastic.co" # Use this as a signal that setup is complete python3 -m http.server 4444 & -sleep infinity \ No newline at end of file +sleep infinity diff --git a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java index 11063dc4cf10b..1b47039f9ac5c 100644 --- 
a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java @@ -64,10 +64,16 @@ public DockerEnvironmentAwareTestContainer(Future image) { public void start() { Assume.assumeFalse("Docker support excluded on OS", EXCLUDED_OS); Assume.assumeTrue("Docker probing succesful", DOCKER_PROBING_SUCCESSFUL); - withLogConsumer(new Slf4jLogConsumer(logger())); + withLogConsumer(new Slf4jLogConsumer(LOGGER)); super.start(); } + @Override + public void stop() { + LOGGER.info("Stopping container {}", getContainerId()); + super.stop(); + } + @Override public void cache() { try { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java index 2d4a88c4369b5..de4df7eaaaf49 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/FileResource.java @@ -14,18 +14,19 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.function.Supplier; public class FileResource implements Resource { - private final Path file; + private final Supplier file; - FileResource(Path file) { + FileResource(Supplier file) { this.file = file; } @Override public InputStream asStream() { try { - return Files.newInputStream(file, StandardOpenOption.READ); + return Files.newInputStream(file.get(), StandardOpenOption.READ); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java index 829e34007044d..22dc3e7465426 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java @@ -32,6 +32,10 @@ static Resource fromClasspath(String path) { } static Resource fromFile(Path file) { + return fromFile(() -> file); + } + + static Resource fromFile(Supplier file) { return new FileResource(file); } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 804f4eae4042d..5ac83f94f6248 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -278,11 +278,18 @@ public static Iterable createParameters(NamedXContentRegistry executea return createParameters(executeableSectionRegistry, null); } + /** + * Create parameters for this parameterized test. + */ + public static Iterable createParameters(String[] testPaths, Map yamlParameters) throws Exception { + return createParameters(ExecutableSection.XCONTENT_REGISTRY, testPaths, yamlParameters); + } + /** * Create parameters for this parameterized test. 
*/ public static Iterable createParameters(String[] testPaths) throws Exception { - return createParameters(ExecutableSection.XCONTENT_REGISTRY, testPaths); + return createParameters(testPaths, Collections.emptyMap()); } /** @@ -295,6 +302,23 @@ public static Iterable createParameters(String[] testPaths) throws Exc */ public static Iterable createParameters(NamedXContentRegistry executeableSectionRegistry, String[] testPaths) throws Exception { + return createParameters(executeableSectionRegistry, testPaths, Collections.emptyMap()); + } + + /** + * Create parameters for this parameterized test. + * + * @param executeableSectionRegistry registry of executable sections + * @param testPaths list of paths to explicitly search for tests. If null then include all tests in root path. + * @param yamlParameters map or parameters used within the yaml specs to be replaced at parsing time. + * @return list of test candidates. + * @throws Exception + */ + public static Iterable createParameters( + NamedXContentRegistry executeableSectionRegistry, + String[] testPaths, + Map yamlParameters + ) throws Exception { if (testPaths != null && System.getProperty(REST_TESTS_SUITE) != null) { throw new IllegalArgumentException("The '" + REST_TESTS_SUITE + "' system property is not supported with explicit test paths."); } @@ -308,7 +332,7 @@ public static Iterable createParameters(NamedXContentRegistry executea for (String api : yamlSuites.keySet()) { List yamlFiles = new ArrayList<>(yamlSuites.get(api)); for (Path yamlFile : yamlFiles) { - ClientYamlTestSuite suite = ClientYamlTestSuite.parse(executeableSectionRegistry, api, yamlFile); + ClientYamlTestSuite suite = ClientYamlTestSuite.parse(executeableSectionRegistry, api, yamlFile, yamlParameters); suites.add(suite); try { suite.validate(); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java 
b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java new file mode 100644 index 0000000000000..43ea4f9d665d0 --- /dev/null +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ParameterizableYamlXContentParser.java @@ -0,0 +1,295 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test.rest.yaml; + +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.nio.CharBuffer; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * a wrapper around YamlXContentParser that allows for parameter replacement in the yaml file + */ +public class ParameterizableYamlXContentParser implements XContentParser { + private final XContentParser delegate; + private final Map params; + + public ParameterizableYamlXContentParser(XContentParser delegate, Map params) { + this.delegate = delegate; + this.params = params.entrySet().stream().collect(Collectors.toMap(e -> "@" + e.getKey() + "@", Map.Entry::getValue)); + } + + @Override + public XContentType contentType() { + return delegate.contentType(); + } + + @Override + public void allowDuplicateKeys(boolean allowDuplicateKeys) { + 
delegate.allowDuplicateKeys(allowDuplicateKeys); + } + + @Override + public Token nextToken() throws IOException { + return delegate.nextToken(); + } + + @Override + @Nullable + public String nextFieldName() throws IOException { + return delegate.nextFieldName(); + } + + @Override + public void skipChildren() throws IOException { + delegate.skipChildren(); + } + + @Override + public Token currentToken() { + return delegate.currentToken(); + } + + @Override + public String currentName() throws IOException { + return delegate.currentName(); + } + + @Override + public Map map() throws IOException { + return delegate.map(); + } + + @Override + public Map mapOrdered() throws IOException { + return visitMapForParameterReplacements(delegate.mapOrdered()); + } + + private Map visitMapForParameterReplacements(Map stringObjectMap) { + var updatedMap = stringObjectMap.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> maybeReplaceParams(e.getValue()))); + return updatedMap; + } + + @SuppressWarnings("unchecked") + private Object maybeReplaceParams(Object inputValue) { + if (inputValue == null) { + return null; + } + if (inputValue instanceof Map) { + return visitMapForParameterReplacements((Map) inputValue); + } + if (inputValue instanceof String) { + if (((String) inputValue).matches(".*@.*@.*")) { + String value = (String) inputValue; + for (String s : params.keySet()) { + if (value.contains(s)) { + value = value.replace(s, params.get(s).toString()); + } + } + return value; + } + } + return inputValue; + } + + @Override + public Map mapStrings() throws IOException { + return delegate.mapStrings(); + } + + @Override + public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) + throws IOException { + return delegate.map(mapFactory, mapValueParser); + } + + @Override + public List list() throws IOException { + return delegate.list(); + } + + @Override + public List listOrderedMap() throws IOException { + return delegate.listOrderedMap(); + } 
+ + @Override + public String text() throws IOException { + return delegate.text(); + } + + @Override + public String textOrNull() throws IOException { + return delegate.textOrNull(); + } + + @Override + public CharBuffer charBufferOrNull() throws IOException { + return delegate.charBufferOrNull(); + } + + @Override + public CharBuffer charBuffer() throws IOException { + return delegate.charBuffer(); + } + + @Override + public Object objectText() throws IOException { + return delegate.objectText(); + } + + @Override + public Object objectBytes() throws IOException { + return delegate.objectBytes(); + } + + @Override + public boolean hasTextCharacters() { + return delegate.hasTextCharacters(); + } + + @Override + public char[] textCharacters() throws IOException { + return delegate.textCharacters(); + } + + @Override + public int textLength() throws IOException { + return delegate.textLength(); + } + + @Override + public int textOffset() throws IOException { + return delegate.textOffset(); + } + + @Override + public Number numberValue() throws IOException { + return delegate.numberValue(); + } + + @Override + public NumberType numberType() throws IOException { + return delegate.numberType(); + } + + @Override + public short shortValue(boolean coerce) throws IOException { + return delegate.shortValue(coerce); + } + + @Override + public int intValue(boolean coerce) throws IOException { + return delegate.intValue(coerce); + } + + @Override + public long longValue(boolean coerce) throws IOException { + return delegate.longValue(coerce); + } + + @Override + public float floatValue(boolean coerce) throws IOException { + return delegate.floatValue(coerce); + } + + @Override + public double doubleValue(boolean coerce) throws IOException { + return delegate.doubleValue(coerce); + } + + @Override + public short shortValue() throws IOException { + return delegate.shortValue(); + } + + @Override + public int intValue() throws IOException { + return delegate.intValue(); + } + + 
@Override + public long longValue() throws IOException { + return delegate.longValue(); + } + + @Override + public float floatValue() throws IOException { + return delegate.floatValue(); + } + + @Override + public double doubleValue() throws IOException { + return delegate.doubleValue(); + } + + @Override + public boolean isBooleanValue() throws IOException { + return delegate.isBooleanValue(); + } + + @Override + public boolean booleanValue() throws IOException { + return delegate.booleanValue(); + } + + @Override + public byte[] binaryValue() throws IOException { + return delegate.binaryValue(); + } + + @Override + public XContentLocation getTokenLocation() { + return delegate.getTokenLocation(); + } + + @Override + public T namedObject(Class categoryClass, String name, Object context) throws IOException { + return getXContentRegistry().parseNamedObject(categoryClass, name, this, context); + } + + @Override + public NamedXContentRegistry getXContentRegistry() { + return delegate.getXContentRegistry(); + } + + @Override + public boolean isClosed() { + return delegate.isClosed(); + } + + @Override + public RestApiVersion getRestApiVersion() { + return delegate.getRestApiVersion(); + } + + @Override + public DeprecationHandler getDeprecationHandler() { + return delegate.getDeprecationHandler(); + } + + @Override + public void close() throws IOException { + delegate.close(); + } + +} diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index e5f46ff135171..466b64736ddbc 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -10,6 +10,7 @@ import org.elasticsearch.client.NodeSelector; import org.elasticsearch.common.ParsingException; 
import org.elasticsearch.common.io.Channels; +import org.elasticsearch.test.rest.yaml.ParameterizableYamlXContentParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; @@ -26,6 +27,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -38,7 +40,8 @@ * Supports a setup section and multiple test sections. */ public class ClientYamlTestSuite { - public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSectionRegistry, String api, Path file) throws IOException { + public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSectionRegistry, String api, Path file, Map params) + throws IOException { if (Files.isRegularFile(file) == false) { throw new IllegalArgumentException(file.toAbsolutePath() + " is not a file"); } @@ -63,10 +66,18 @@ public static ClientYamlTestSuite parse(NamedXContentRegistry executeableSection } try ( - XContentParser parser = YamlXContent.yamlXContent.createParser( - XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), - Files.newInputStream(file) - ) + XContentParser parser = params.isEmpty() + ? YamlXContent.yamlXContent.createParser( + XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), + Files.newInputStream(file) + ) + : new ParameterizableYamlXContentParser( + YamlXContent.yamlXContent.createParser( + XContentParserConfiguration.EMPTY.withRegistry(executeableSectionRegistry), + Files.newInputStream(file) + ), + params + ) ) { return parse(api, filename, Optional.of(file), parser); } catch (Exception e) { @@ -103,6 +114,10 @@ public static ClientYamlTestSuite parse(String api, String suiteName, Optional

      (testSections)); } + public static ClientYamlTestSuite parse(NamedXContentRegistry xcontentRegistry, String api, Path filePath) throws IOException { + return parse(xcontentRegistry, api, filePath, Collections.emptyMap()); + } + private final String api; private final String name; private final Optional file; diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle index 94aa196f8e8e1..333364c6167c0 100644 --- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle @@ -5,28 +5,17 @@ * 2.0. */ -import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.ports.ReservedPortRange - -import java.nio.file.Files -import java.nio.file.Paths - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' apply plugin: 'elasticsearch.internal-available-ports' -final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture') -final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture') -final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') - dependencies { + clusterPlugins project(':plugins:repository-hdfs') javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) - javaRestTestImplementation hdfsRepoPluginProject + javaRestTestImplementation project(path: ':test:fixtures:hdfs-fixture', configuration:"shadowedHdfs2") + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" } restResources 
{ @@ -35,152 +24,7 @@ restResources { } } -testFixtures.useFixture(krbFixtureProject.path, 'hdfs-snapshot') - -configurations { - hdfsFixture -} - -dependencies { - hdfsFixture hdfsFixtureProject - // Set the keytab files in the classpath so that we can access them from test code without the security manager freaking out. - if (isEclipse == false) { - javaRestTestRuntimeOnly files(krbFixtureProject.ext.krb5Keytabs("hdfs-snapshot", "hdfs_hdfs.build.elastic.co.keytab").parent){ - builtBy ":test:fixtures:krb5kdc-fixture:preProcessFixture" - } - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } -} - -String realm = "BUILD.ELASTIC.CO" -String krb5conf = krbFixtureProject.ext.krb5Conf("hdfs") - -// Create HDFS File System Testing Fixtures -for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - dependsOn project.configurations.hdfsFixture, krbFixtureProject.tasks.postProcessFixture - executable = "${BuildParams.runtimeJavaHome}/bin/java" - env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}") - } - // configure port dynamically - def portRange = 
project.getExtensions().getByType(ReservedPortRange) - miniHDFSArgs.add("-Dhdfs.config.port=${portRange.getOrAllocate(name)}") - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - - // If it's a secure fixture, then set the principal name and keytab locations to use for auth. - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")) - } - - args miniHDFSArgs.toArray() - } -} - -// Disable integration test if Fips mode tasks.named("javaRestTest").configure { - description = "Runs rest tests against an elasticsearch cluster with HDFS." - def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("hdfsFixture") - systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/searchable_snapshots/simple' - BuildParams.withFipsEnabledOnly(it) -} - -tasks.register("javaRestTestSecure", RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with Secured HDFS." 
- def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("secureHdfsFixture") - nonInputProperties.systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/searchable_snapshots/secure' - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") - ) - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - classpath = sourceSets.javaRestTest.runtimeClasspath - BuildParams.withFipsEnabledOnly(it) -} -tasks.named("check").configure { dependsOn("javaRestTestSecure") } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - plugin(hdfsRepoPluginProject.path) - setting 'xpack.license.self_generated.type', 'trial' - - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - - setting 'xpack.security.enabled', 'false' -} - -testClusters.matching { it.name == "javaRestTestSecure" }.configureEach { - systemProperty "java.security.krb5.conf", krb5conf - jvmArgs "--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED" - extraConfigFile( - "repository-hdfs/krb5.keytab", - file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE - ) -} - -// Determine HDFS Fixture compatibility for the current build environment. 
-boolean fixtureSupported = false -if (OS.current() != OS.WINDOWS) { - // hdfs fixture will not start without hadoop native libraries on windows - String nativePath = System.getenv("HADOOP_HOME") - if (nativePath != null) { - java.nio.file.Path path = Paths.get(nativePath) - if (Files.isDirectory(path) && - Files.exists(path.resolve("bin").resolve("winutils.exe")) && - Files.exists(path.resolve("bin").resolve("hadoop.dll")) && - Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { - fixtureSupported = true - } else { - throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin") - } - } -} else { - fixtureSupported = true -} - -boolean legalPath = rootProject.rootDir.toString().contains(" ") == false -if (legalPath == false) { - fixtureSupported = false -} - -if (fixtureSupported) { - tasks.named("javaRestTest").configure {dependsOn "hdfsFixture" } - tasks.named("javaRestTestSecure").configure {dependsOn "secureHdfsFixture" } -} else { - tasks.named("javaRestTest").configure {enabled = false } - tasks.named("javaRestTestSecure").configure { enabled = false } - if (legalPath) { - logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") - } else { - logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'") - } + usesDefaultDistribution() + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java index 515be1adccc9b..b8ace1d32d0f7 100644 --- 
a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/HdfsSearchableSnapshotsIT.java @@ -7,13 +7,39 @@ package org.elasticsearch.xpack.searchablesnapshots.hdfs; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; - +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) public class HdfsSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override protected String writeRepositoryType() { return "hdfs"; @@ -21,19 +47,9 @@ protected String 
writeRepositoryType() { @Override protected Settings writeRepositorySettings() { - final String uri = System.getProperty("test.hdfs.uri"); - assertThat(uri, not(blankOrNullString())); - - final String path = System.getProperty("test.hdfs.path"); - assertThat(path, not(blankOrNullString())); - - // Optional based on type of test - final String principal = System.getProperty("test.krb5.principal.es"); - + final String uri = "hdfs://localhost:" + hdfsFixture.getPort(); + final String path = "/user/elasticsearch/test/searchable_snapshots/simple"; Settings.Builder repositorySettings = Settings.builder().put("client", "searchable_snapshots").put("uri", uri).put("path", path); - if (principal != null) { - repositorySettings.put("security.principal", principal); - } return repositorySettings.build(); } } diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java new file mode 100644 index 0000000000000..cf30fae9861ff --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/hdfs/SecureHdfsSearchableSnapshotsIT.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.searchablesnapshots.hdfs; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class SecureHdfsSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + 
@Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected String writeRepositoryType() { + return "hdfs"; + } + + @Override + protected Settings writeRepositorySettings() { + final String uri = "hdfs://localhost:" + hdfsFixture.getPort(); + final String path = "/user/elasticsearch/test/searchable_snapshots/secure"; + Settings.Builder repositorySettings = Settings.builder().put("client", "searchable_snapshots").put("uri", uri).put("path", path); + + final String principal = "elasticsearch@BUILD.ELASTIC.CO"; + repositorySettings.put("security.principal", principal); + return repositorySettings.build(); + } + +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index 90a6f4ada32e0..3fbb55ca4eb3a 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ -5,29 +5,19 @@ * 2.0. 
*/ - -import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.util.ports.ReservedPortRange - -import java.nio.file.Files -import java.nio.file.Paths - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.test.fixtures' -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -apply plugin: 'elasticsearch.internal-available-ports' - -final Project hdfsFixtureProject = project(':test:fixtures:hdfs2-fixture') -final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture') -final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) - javaRestTestImplementation project(':plugins:repository-hdfs') + javaRestTestImplementation project(path: ':test:fixtures:hdfs-fixture', configuration:"shadow") + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') + javaRestTestImplementation "org.slf4j:slf4j-api:${versions.slf4j}" + javaRestTestImplementation "org.slf4j:slf4j-simple:${versions.slf4j}" + javaRestTestRuntimeOnly "com.google.guava:guava:16.0.1" + javaRestTestRuntimeOnly "commons-cli:commons-cli:1.2" } restResources { @@ -36,151 +26,15 @@ restResources { } } -testFixtures.useFixture(krbFixtureProject.path, 'hdfs-snapshot-repo-tests') - -configurations { - hdfsFixture -} - dependencies { - hdfsFixture hdfsFixtureProject - // Set the keytab files in the classpath so that we can access them from test code without the security manager freaking out. 
- if (isEclipse == false) { - testRuntimeOnly files(krbFixtureProject.ext.krb5Keytabs("hdfs-snapshot-repo-tests", "hdfs_hdfs.build.elastic.co.keytab").parent){ - builtBy ":test:fixtures:krb5kdc-fixture:preProcessFixture" - } - } -} - -normalization { - runtimeClasspath { - // ignore generated keytab files for the purposes of build avoidance - ignore '*.keytab' - // ignore fixture ports file which is on the classpath primarily to pacify the security manager - ignore 'ports' - } -} - -String realm = "BUILD.ELASTIC.CO" -String krb5conf = krbFixtureProject.ext.krb5Conf("hdfs") - -// Create HDFS File System Testing Fixtures -for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) { - project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) { - dependsOn project.configurations.hdfsFixture, krbFixtureProject.tasks.postProcessFixture - executable = "${BuildParams.runtimeJavaHome}/bin/java" - env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" - maxWaitInSeconds 60 - BuildParams.withFipsEnabledOnly(it) - waitCondition = { fixture, ant -> - // the hdfs.MiniHDFS fixture writes the ports file when - // it's ready, so we can just wait for the file to exist - return fixture.portsFile.exists() - } - final List miniHDFSArgs = [] - - // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options - if (name.equals('secureHdfsFixture')) { - onlyIf("Only runtime java version < 16") { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 } - miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]) - miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}") - } - // configure port dynamically - def portRange = project.getExtensions().getByType(ReservedPortRange) - miniHDFSArgs.add("-Dhdfs.config.port=${portRange.getOrAllocate(name)}") - - // Common options - miniHDFSArgs.add('hdfs.MiniHDFS') - miniHDFSArgs.add(baseDir) - - // If it's a secure 
fixture, then set the principal name and keytab locations to use for auth. - if (name.equals('secureHdfsFixture')) { - miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}") - miniHDFSArgs.add(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")) - } - - args miniHDFSArgs.toArray() - } + clusterPlugins project(':plugins:repository-hdfs') } // Disable integration test if Fips mode tasks.named("javaRestTest").configure { + usesDefaultDistribution() description = "Runs rest tests against an elasticsearch cluster with HDFS." - def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("hdfsFixture") - systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/simple' BuildParams.withFipsEnabledOnly(it) -} - -tasks.register("javaRestTestSecure", RestIntegTestTask) { - description = "Runs rest tests against an elasticsearch cluster with Secured HDFS." 
- def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("secureHdfsFixture") - nonInputProperties.systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort" - nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/secure' - nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" - nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - nonInputProperties.systemProperty( - "test.krb5.keytab.hdfs", - project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab") - ) - onlyIf("FIPS mode disabled and runtime java < 16") { - BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 - } - testClassesDirs = sourceSets.javaRestTest.output.classesDirs - classpath = sourceSets.javaRestTest.runtimeClasspath -} - -tasks.named("check").configure { dependsOn("javaRestTestSecure") } - -testClusters.configureEach { - testDistribution = 'DEFAULT' - plugin(hdfsRepoPluginProject.path) - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'false' -} - -testClusters.matching { it.name == "javaRestTestSecure" }.configureEach { - systemProperty "java.security.krb5.conf", krb5conf - extraConfigFile( - "repository-hdfs/krb5.keytab", - file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE - ) -} - -// Determine HDFS Fixture compatibility for the current build environment. 
-boolean fixtureSupported = false -if (OS.current() == OS.WINDOWS) { - // hdfs fixture will not start without hadoop native libraries on windows - String nativePath = System.getenv("HADOOP_HOME") - if (nativePath != null) { - java.nio.file.Path path = Paths.get(nativePath) - if (Files.isDirectory(path) && - Files.exists(path.resolve("bin").resolve("winutils.exe")) && - Files.exists(path.resolve("bin").resolve("hadoop.dll")) && - Files.exists(path.resolve("bin").resolve("hdfs.dll"))) { - fixtureSupported = true - } else { - throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin") - } - } -} else { - fixtureSupported = true -} - -boolean legalPath = rootProject.rootDir.toString().contains(" ") == false -if (legalPath == false) { - fixtureSupported = false -} - -if (fixtureSupported) { - tasks.named("javaRestTest").configure {dependsOn "hdfsFixture" } - tasks.named("javaRestTestSecure").configure {dependsOn "secureHdfsFixture" } -} else { - tasks.named("javaRestTest").configure {enabled = false } - tasks.named("javaRestTestSecure").configure { enabled = false } - if (legalPath) { - logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH") - } else { - logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'") - } + // required for krb5kdc-fixture to work + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java new file mode 100644 index 0000000000000..2810c4801e8dd --- /dev/null +++ 
b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/AbstractHdfsSnapshotRepoTestKitIT.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import org.elasticsearch.common.settings.Settings; + +import static org.hamcrest.Matchers.blankOrNullString; +import static org.hamcrest.Matchers.not; + +public abstract class AbstractHdfsSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { + + @Override + protected String repositoryType() { + return "hdfs"; + } + + @Override + protected Settings repositorySettings() { + final String uri = "hdfs://localhost:" + getHdfsPort(); + // final String uri = System.getProperty("test.hdfs.uri"); + assertThat(uri, not(blankOrNullString())); + + final String path = getRepositoryPath(); + assertThat(path, not(blankOrNullString())); + Settings.Builder repositorySettings = Settings.builder().put("client", "repository_test_kit").put("uri", uri).put("path", path); + return repositorySettings.build(); + } + + protected abstract String getRepositoryPath(); + + protected abstract int getHdfsPort(); + +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java index 2cc81567e94bf..e9787ecdce854 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java +++ 
b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java @@ -6,33 +6,43 @@ */ package org.elasticsearch.repositories.blobstore.testkit; -import org.elasticsearch.common.settings.Settings; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -public class HdfsSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class }) +public class HdfsSnapshotRepoTestKitIT extends AbstractHdfsSnapshotRepoTestKitIT { + + public static HdfsFixture hdfsFixture = new HdfsFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(hdfsFixture).around(cluster); @Override - protected String repositoryType() { - return "hdfs"; + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); } @Override - protected Settings repositorySettings() { - final String uri = System.getProperty("test.hdfs.uri"); - assertThat(uri, not(blankOrNullString())); - - final String path = System.getProperty("test.hdfs.path"); - assertThat(path, not(blankOrNullString())); - - // Optional based on type of test - final String principal = 
System.getProperty("test.krb5.principal.es"); + protected String getRepositoryPath() { + return "/user/elasticsearch/test/repository_test_kit/simple"; + } - Settings.Builder repositorySettings = Settings.builder().put("client", "repository_test_kit").put("uri", uri).put("path", path); - if (principal != null) { - repositorySettings.put("security.principal", principal); - } - return repositorySettings.build(); + @Override + protected int getHdfsPort() { + return hdfsFixture.getPort(); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java new file mode 100644 index 0000000000000..6d599e41e3b9f --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/SecureHdfsSnapshotRepoTestKitIT.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.hdfs.HdfsClientThreadLeakFilter; +import org.elasticsearch.test.fixtures.hdfs.HdfsFixture; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class, TestContainersThreadFilter.class }) +public class SecureHdfsSnapshotRepoTestKitIT extends AbstractHdfsSnapshotRepoTestKitIT { + + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(); + + public static HdfsFixture hdfsFixture = new HdfsFixture().withKerberos(() -> krb5Fixture.getPrincipal(), () -> krb5Fixture.getKeytab()); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .plugin("repository-hdfs") + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "false") + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) + .configFile("repository-hdfs/krb5.conf", Resource.fromString(() -> krb5Fixture.getConf())) + .configFile("repository-hdfs/krb5.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(hdfsFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected int getHdfsPort() { + return 
hdfsFixture.getPort(); + } + + @Override + protected String getRepositoryPath() { + return "/user/elasticsearch/test/repository_test_kit/secure"; + } + + @Override + protected Settings repositorySettings() { + return Settings.builder().put(super.repositorySettings()).put("security.principal", "elasticsearch@BUILD.ELASTIC.CO").build(); + } +} diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 62d6f0a1e34b8..0ec7044ed18ab 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -1,51 +1,15 @@ -import java.nio.file.Path -import java.nio.file.Paths - apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' - -testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "peppa" dependencies { javaRestTestImplementation project(':x-pack:plugin:core') javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation(testArtifact(project(xpackModule('security')))) + javaRestTestImplementation project(':test:fixtures:krb5kdc-fixture') } -normalization { - runtimeClasspath { - ignore 'krb5.conf' - ignore '*.keytab' - } -} - -tasks.register("copyKeytabToGeneratedResources", Copy) { - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") - into "$buildDir/generated-resources/keytabs" - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "HTTP_localhost.keytab") - into "$buildDir/generated-resources/keytabs" - dependsOn ":test:fixtures:krb5kdc-fixture:postProcessFixture" -} - -tasks.register("copyConfToGeneratedResources", Copy) { - from project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("peppa") - into "$buildDir/generated-resources/conf" - dependsOn ":test:fixtures:krb5kdc-fixture:postProcessFixture" -} - -String realm = "BUILD.ELASTIC.CO" tasks.named("javaRestTest").configure { - dependsOn "copyKeytabToGeneratedResources", "copyConfToGeneratedResources" 
usesDefaultDistribution() - Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") - Path krb5Conf = Paths.get("${project.buildDir}", "generated-resources", "conf", "krb5.conf") - nonInputProperties.systemProperty 'test.userkt', "peppa@${realm}" - nonInputProperties.systemProperty 'test.userkt.keytab', "${peppaKeytab}" - nonInputProperties.systemProperty 'test.userpwd', "george@${realm}" - nonInputProperties.systemProperty 'test.krb5.conf', "${krb5Conf}" - nonInputProperties.systemProperty 'java.security.krb5.conf', "${krb5Conf}" - systemProperty 'test.userpwd.password', "dino_but_longer_than_14_chars" - systemProperty 'sun.security.krb5.debug', true - classpath += files("$buildDir/generated-resources/keytabs") - classpath += files("$buildDir/generated-resources/conf") + description = "Runs rest tests against an elasticsearch cluster with Kerberos." + // required for krb5kdc-fixture to work + jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } diff --git a/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java b/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java index ff37b9c77735c..3058905548c08 100644 --- a/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java +++ b/x-pack/qa/kerberos-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosAuthenticationIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.security.authc.kerberos; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.elasticsearch.client.Request; @@ -22,12 +24,16 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import 
org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.krb5kdc.Krb5kDcContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.ietf.jgss.GSSException; import org.junit.Before; import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.io.IOException; import java.net.InetAddress; @@ -56,15 +62,16 @@ * Demonstrates login by keytab and login by password for given user principal * name using rest client. */ +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class KerberosAuthenticationIT extends ESRestTestCase { private static final String ENABLE_KERBEROS_DEBUG_LOGS_KEY = "test.krb.debug"; - private static final String TEST_USER_WITH_KEYTAB_KEY = "test.userkt"; - private static final String TEST_USER_WITH_KEYTAB_PATH_KEY = "test.userkt.keytab"; - private static final String TEST_USER_WITH_PWD_KEY = "test.userpwd"; - private static final String TEST_USER_WITH_PWD_PASSWD_KEY = "test.userpwd.password"; + private static final String TEST_USER_WITH_KEYTAB_KEY = "peppa@BUILD.ELASTIC.CO"; + private static final String TEST_USER_WITH_PWD_KEY = "george@BUILD.ELASTIC.CO"; + private static final String TEST_USER_WITH_PWD_PASSWD_KEY = "dino_but_longer_than_14_chars"; private static final String TEST_KERBEROS_REALM_NAME = "kerberos"; - @ClassRule + public static Krb5kDcContainer krb5Fixture = new Krb5kDcContainer(Krb5kDcContainer.ProvisioningId.PEPPA); + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the hostname when starting the @@ -81,13 
+88,16 @@ public class KerberosAuthenticationIT extends ESRestTestCase { .setting("xpack.security.authc.realms.kerberos.kerberos.keytab.path", "es.keytab") .setting("xpack.security.authc.realms.kerberos.kerberos.krb.debug", "true") .setting("xpack.security.authc.realms.kerberos.kerberos.remove_realm_name", "false") - .systemProperty("java.security.krb5.conf", System.getProperty("test.krb5.conf")) + .systemProperty("java.security.krb5.conf", () -> krb5Fixture.getConfPath().toString()) .systemProperty("sun.security.krb5.debug", "true") .user("test_admin", "x-pack-test-password") .user("test_kibana_user", "x-pack-test-password", "kibana_system", false) - .configFile("es.keytab", Resource.fromClasspath("HTTP_localhost.keytab")) + .configFile("es.keytab", Resource.fromFile(() -> krb5Fixture.getEsKeytab())) .build(); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(krb5Fixture).around(cluster); + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -130,20 +140,19 @@ public void setupRoleMapping() throws IOException { } public void testLoginByKeytab() throws IOException, PrivilegedActionException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_KEYTAB_KEY); - final String keytabPath = System.getProperty(TEST_USER_WITH_KEYTAB_PATH_KEY); - final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); + final String keytabPath = krb5Fixture.getKeytab().toString(); + final boolean enabledDebugLogs = Boolean.parseBoolean(ENABLE_KERBEROS_DEBUG_LOGS_KEY); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( - userPrincipalName, + krb5Fixture.getPrincipal(), keytabPath, enabledDebugLogs ); - executeRequestAndVerifyResponse(userPrincipalName, callbackHandler); + executeRequestAndVerifyResponse(krb5Fixture.getPrincipal(), callbackHandler); } public void testLoginByUsernamePassword() throws IOException, 
PrivilegedActionException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); - final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); + final String userPrincipalName = TEST_USER_WITH_PWD_KEY; + final String password = TEST_USER_WITH_PWD_PASSWD_KEY; final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( userPrincipalName, @@ -154,8 +163,8 @@ public void testLoginByUsernamePassword() throws IOException, PrivilegedActionEx } public void testGetOauth2TokenInExchangeForKerberosTickets() throws PrivilegedActionException, GSSException, IOException { - final String userPrincipalName = System.getProperty(TEST_USER_WITH_PWD_KEY); - final String password = System.getProperty(TEST_USER_WITH_PWD_PASSWD_KEY); + final String userPrincipalName = TEST_USER_WITH_PWD_KEY; + final String password = TEST_USER_WITH_PWD_PASSWD_KEY; final boolean enabledDebugLogs = Boolean.parseBoolean(System.getProperty(ENABLE_KERBEROS_DEBUG_LOGS_KEY)); final SpnegoHttpClientConfigCallbackHandler callbackHandler = new SpnegoHttpClientConfigCallbackHandler( userPrincipalName, From 829ea4d34d260a72d8fc43fa160e2f7a6e2ba978 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 26 Mar 2024 09:12:55 +0100 Subject: [PATCH 169/214] ESQL: Sum, Min, Max and Avg of constants (#105454) Allow expressions like ... | STATS sum([1, -9]), sum(null), min(21.0*3), avg([1,2,3]) by substituting sum(const) by mv_sum(const)*count(*) and min(const) by mv_min(const) (and similarly for max and avg). 
--- docs/changelog/105454.yaml | 5 + .../src/main/resources/stats.csv-spec | 96 +++++++++ .../esql/expression/SurrogateExpression.java | 3 + .../expression/function/aggregate/Avg.java | 4 +- .../expression/function/aggregate/Max.java | 9 +- .../expression/function/aggregate/Min.java | 9 +- .../expression/function/aggregate/Sum.java | 19 +- .../esql/optimizer/LogicalPlanOptimizer.java | 62 ++++-- .../xpack/esql/planner/AggregateMapper.java | 8 +- .../optimizer/LogicalPlanOptimizerTests.java | 192 +++++++++++++++++- 10 files changed, 375 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/105454.yaml diff --git a/docs/changelog/105454.yaml b/docs/changelog/105454.yaml new file mode 100644 index 0000000000000..fc814a343c46b --- /dev/null +++ b/docs/changelog/105454.yaml @@ -0,0 +1,5 @@ +pr: 105454 +summary: "ESQL: Sum of constants" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 917735040c61d..91c79e64b2385 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1226,3 +1226,99 @@ FROM employees vals:l 183 ; + +sumOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = sum(1), s2point1 = sum(2.1), s_mv = sum([-1, 0, 3]) * 3, s_null = sum(null), rows = count(*) +; + +s1:l | s2point1:d | s_mv:l | s_null:d | rows:l +100 | 210.0 | 600 | null | 100 +; + +sumOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = round(sum(2.1), 1), s_mv = sum([-1, 0, 3]), rows = count(*) by languages +| SORT languages +; + +s2point1:d | s_mv:l | rows:l | languages:i +31.5 | 30 | 15 | 1 +39.9 | 38 | 19 | 2 +35.7 | 34 | 17 | 3 +37.8 | 36 | 18 | 4 +44.1 | 42 | 21 | 5 +21.0 | 20 | 10 | null +; + +avgOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| 
STATS s1 = avg(1), s_mv = avg([-1, 0, 3]) * 3, s_null = avg(null) +; + +s1:d | s_mv:d | s_null:d +1.0 | 2.0 | null +; + +avgOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = avg(2.1), s_mv = avg([-1, 0, 3]) * 3 by languages +| SORT languages +; + +s2point1:d | s_mv:d | languages:i +2.1 | 2.0 | 1 +2.1 | 2.0 | 2 +2.1 | 2.0 | 3 +2.1 | 2.0 | 4 +2.1 | 2.0 | 5 +2.1 | 2.0 | null +; + +minOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = min(1), s_mv = min([-1, 0, 3]), s_null = min(null) +; + +s1:i | s_mv:i | s_null:null +1 | -1 | null +; + +minOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = min(2.1), s_mv = min([-1, 0, 3]) by languages +| SORT languages +; + +s2point1:d | s_mv:i | languages:i +2.1 | -1 | 1 +2.1 | -1 | 2 +2.1 | -1 | 3 +2.1 | -1 | 4 +2.1 | -1 | 5 +2.1 | -1 | null +; + +maxOfConst#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s1 = max(1), s_mv = max([-1, 0, 3]), s_null = max(null) +; + +s1:i | s_mv:i | s_null:null +1 | 3 | null +; + +maxOfConstGrouped#[skip:-8.13.99,reason:supported in 8.14] +FROM employees +| STATS s2point1 = max(2.1), s_mv = max([-1, 0, 3]) by languages +| SORT languages +; + +s2point1:d | s_mv:i | languages:i +2.1 | 3 | 1 +2.1 | 3 | 2 +2.1 | 3 | 3 +2.1 | 3 | 4 +2.1 | 3 | 5 +2.1 | 3 | null +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java index e7f507e3983a7..bf48d1d806e18 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/SurrogateExpression.java @@ -15,5 +15,8 @@ */ public interface SurrogateExpression { + /** + * Returns the expression to be replaced by or {@code null} if this cannot be replaced. 
+ */ Expression surrogate(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java index aee07e6e044c6..3ea0721d52c00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Avg.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; @@ -60,6 +61,7 @@ public Avg replaceChildren(List newChildren) { public Expression surrogate() { var s = source(); var field = field(); - return new Div(s, new Sum(s, field), new Count(s, field), dataType()); + + return field().foldable() ? 
new MvAvg(s, field) : new Div(s, new Sum(s, field), new Count(s, field), dataType()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index 00c3fd30530cd..3e8030322caa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -11,8 +11,10 @@ import org.elasticsearch.compute.aggregation.MaxDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MaxLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,7 @@ import java.util.List; -public class Max extends NumericAggregate { +public class Max extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The maximum value of a numeric field.", isAggregation = true) public Max(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { @@ -61,4 +63,9 @@ protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new MaxDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + return field().foldable() ? 
new MvMax(source(), field()) : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index 6bbc349e2b523..c69d2f4a1fc2d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -11,8 +11,10 @@ import org.elasticsearch.compute.aggregation.MinDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.MinLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -20,7 +22,7 @@ import java.util.List; -public class Min extends NumericAggregate { +public class Min extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = { "double", "integer", "long" }, description = "The minimum value of a numeric field.", isAggregation = true) public Min(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { @@ -61,4 +63,9 @@ protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new MinDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + return field().foldable() ? 
new MvMin(source(), field()) : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java index e6584262183fa..805724bfcd16c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Sum.java @@ -10,12 +10,18 @@ import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumIntAggregatorFunctionSupplier; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunctionSupplier; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.List; @@ -26,7 +32,7 @@ /** * Sum all values of a field in matching documents. 
*/ -public class Sum extends NumericAggregate { +public class Sum extends NumericAggregate implements SurrogateExpression { @FunctionInfo(returnType = "long", description = "The sum of a numeric field.", isAggregation = true) public Sum(Source source, @Param(name = "number", type = { "double", "integer", "long" }) Expression field) { @@ -63,4 +69,15 @@ protected AggregatorFunctionSupplier intSupplier(List inputChannels) { protected AggregatorFunctionSupplier doubleSupplier(List inputChannels) { return new SumDoubleAggregatorFunctionSupplier(inputChannels); } + + @Override + public Expression surrogate() { + var s = source(); + var field = field(); + + // SUM(const) is equivalent to MV_SUM(const)*COUNT(*). + return field.foldable() + ? new Mul(s, new MvSum(s, field), new Count(s, new Literal(s, StringUtils.WILDCARD, DataTypes.KEYWORD))) + : null; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 93505fa4f20fc..3425306863585 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.AttributeSet; +import org.elasticsearch.xpack.ql.expression.EmptyAttribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.ExpressionSet; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -107,6 +108,23 @@ protected List> batches() { return rules(); } + protected static Batch substitutions() { + return new Batch<>( + "Substitutions", + Limiter.ONCE, + // first extract nested aggs top-level - this simplifies the rest of the 
rules + new ReplaceStatsAggExpressionWithEval(), + // second extract nested aggs inside of them + new ReplaceStatsNestedExpressionWithEval(), + // lastly replace surrogate functions + new SubstituteSurrogates(), + new ReplaceRegexMatch(), + new ReplaceAliasingEvalWithProject(), + new SkipQueryOnEmptyMappings() + // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 + ); + } + protected static Batch operators() { return new Batch<>( "Operator Optimization", @@ -150,26 +168,11 @@ protected static Batch cleanup() { } protected static List> rules() { - var substitutions = new Batch<>( - "Substitutions", - Limiter.ONCE, - // first extract nested aggs top-level - this simplifies the rest of the rules - new ReplaceStatsAggExpressionWithEval(), - // second extract nested aggs inside of them - new ReplaceStatsNestedExpressionWithEval(), - // lastly replace surrogate functions - new SubstituteSurrogates(), - new ReplaceRegexMatch(), - new ReplaceAliasingEvalWithProject(), - new SkipQueryOnEmptyMappings() - // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 - ); - var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions, operators(), skip, cleanup(), defaultTopN, label); + return asList(substitutions(), operators(), skip, cleanup(), defaultTopN, label); } // TODO: currently this rule only works for aggregate functions (AVG) @@ -191,8 +194,10 @@ protected LogicalPlan rule(Aggregate aggregate) { // first pass to check existing aggregates (to avoid duplication and alias waste) for (NamedExpression agg : aggs) { - if (Alias.unwrap(agg) instanceof AggregateFunction af && af instanceof SurrogateExpression == false) { - aggFuncToAttr.put(af, agg.toAttribute()); + if (Alias.unwrap(agg) instanceof 
AggregateFunction af) { + if ((af instanceof SurrogateExpression se && se.surrogate() != null) == false) { + aggFuncToAttr.put(af, agg.toAttribute()); + } } } @@ -200,7 +205,7 @@ protected LogicalPlan rule(Aggregate aggregate) { // 0. check list of surrogate expressions for (NamedExpression agg : aggs) { Expression e = Alias.unwrap(agg); - if (e instanceof SurrogateExpression sf) { + if (e instanceof SurrogateExpression sf && sf.surrogate() != null) { changed = true; Expression s = sf.surrogate(); @@ -240,9 +245,22 @@ protected LogicalPlan rule(Aggregate aggregate) { LogicalPlan plan = aggregate; if (changed) { var source = aggregate.source(); - plan = new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), newAggs); + if (newAggs.isEmpty() == false) { + plan = new Aggregate(source, aggregate.child(), aggregate.groupings(), newAggs); + } else { + // All aggs actually have been surrogates for (foldable) expressions, e.g. + // \_Aggregate[[],[AVG([1, 2][INTEGER]) AS s]] + // Replace by a local relation with one row, followed by an eval, e.g. + // \_Eval[[MVAVG([1, 2][INTEGER]) AS s]] + // \_LocalRelation[[{e}#21],[ConstantNullBlock[positions=1]]] + plan = new LocalRelation( + source, + List.of(new EmptyAttribute(source)), + LocalSupplier.of(new Block[] { BlockUtils.constantBlock(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, null, 1) }) + ); + } // 5. 
force the initial projection in place - if (transientEval.size() > 0) { + if (transientEval.isEmpty() == false) { plan = new Eval(source, plan, transientEval); // project away transient fields and re-enforce the original order using references (not copies) to the original aggs // this works since the replaced aliases have their nameId copied to avoid having to update all references (which has @@ -500,6 +518,8 @@ public LogicalPlan apply(LogicalPlan plan) { plan = plan.transformUp(p -> { // Apply the replacement inside Filter and Eval (which shouldn't make a difference) + // TODO: also allow aggregates once aggs on constants are supported. + // C.f. https://github.com/elastic/elasticsearch/issues/100634 if (p instanceof Filter || p instanceof Eval) { p = p.transformExpressionsOnly(ReferenceAttribute.class, replaceReference); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 6ed191a6df500..a95d846133c45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -11,11 +11,9 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.expression.SurrogateExpression; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.aggregate.CountDistinct; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; -import org.elasticsearch.xpack.esql.expression.function.aggregate.Median; import org.elasticsearch.xpack.esql.expression.function.aggregate.MedianAbsoluteDeviation; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import 
org.elasticsearch.xpack.esql.expression.function.aggregate.NumericAggregate; @@ -43,7 +41,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -55,12 +52,11 @@ public class AggregateMapper { static final List NUMERIC = List.of("Int", "Long", "Double"); static final List SPATIAL = List.of("GeoPoint", "CartesianPoint"); - /** List of all ESQL agg functions. */ + /** List of all mappable ESQL agg functions (excludes surrogates like AVG = SUM/COUNT). */ static final List> AGG_FUNCTIONS = List.of( Count.class, CountDistinct.class, Max.class, - Median.class, MedianAbsoluteDeviation.class, Min.class, Percentile.class, @@ -79,7 +75,7 @@ record AggDef(Class aggClazz, String type, String extra, boolean grouping) {} private final HashMap> cache = new HashMap<>(); AggregateMapper() { - this(AGG_FUNCTIONS.stream().filter(Predicate.not(SurrogateExpression.class::isAssignableFrom)).toList()); + this(AGG_FUNCTIONS); } AggregateMapper(List> aggregateFunctionClasses) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 952fbc6f265e4..3f0b39603ef89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -112,9 +112,11 @@ import org.junit.BeforeClass; import java.lang.reflect.Constructor; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -173,6 +175,19 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Analyzer 
analyzerAirports; private static EnrichResolution enrichResolution; + private static class SubstitutionOnlyOptimizer extends LogicalPlanOptimizer { + static SubstitutionOnlyOptimizer INSTANCE = new SubstitutionOnlyOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); + + SubstitutionOnlyOptimizer(LogicalOptimizerContext optimizerContext) { + super(optimizerContext); + } + + @Override + protected List> batches() { + return List.of(substitutions()); + } + } + @BeforeClass public static void init() { parser = new EsqlParser(); @@ -3272,6 +3287,177 @@ public void testStatsWithCanonicalAggregate() throws Exception { assertThat(Expressions.attribute(fields.get(1)), is(Expressions.attribute(sum_argument))); } + /** + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7, w{r}#10]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7, w{r}#10]] + * \_Eval[[MVSUM([1, 2][INTEGER]) * $$COUNT$s$0{r}#25 AS s, MVSUM(314.0[DOUBLE] / 100[INTEGER]) * $$COUNT$s$0{r}#25 AS s + * _expr, MVSUM(null[NULL]) * $$COUNT$s$0{r}#25 AS s_null]] + * \_Aggregate[[w{r}#10],[COUNT(*[KEYWORD]) AS $$COUNT$s$0, w{r}#10]] + * \_Eval[[emp_no{f}#15 % 2[INTEGER] AS w]] + * \_EsRelation[test][_meta_field{f}#21, emp_no{f}#15, first_name{f}#16, ..] 
+ */ + public void testSumOfLiteral() { + var plan = plan(""" + from test + | stats s = sum([1,2]), + s_expr = sum(314.0/100), + s_null = sum(null) + by w = emp_no % 2 + | keep s, s_expr, s_null, w + """, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var agg = as(eval.child(), Aggregate.class); + + var exprs = eval.fields(); + // s = count(*) * 3 + var s = as(exprs.get(0), Alias.class); + assertThat(s.name(), equalTo("s")); + var mul = as(s.child(), Mul.class); + var mvSum = as(mul.left(), MvSum.class); + assertThat(mvSum.fold(), equalTo(3)); + var count = as(mul.right(), ReferenceAttribute.class); + assertThat(count.name(), equalTo("$$COUNT$s$0")); + + // s_expr = count(*) * 3.14 + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.name(), equalTo("s_expr")); + var mul_expr = as(s_expr.child(), Mul.class); + var mvSum_expr = as(mul_expr.left(), MvSum.class); + assertThat(mvSum_expr.fold(), equalTo(3.14)); + var count_expr = as(mul_expr.right(), ReferenceAttribute.class); + assertThat(count_expr.name(), equalTo("$$COUNT$s$0")); + + // s_null = null + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.name(), equalTo("s_null")); + var mul_null = as(s_null.child(), Mul.class); + var mvSum_null = as(mul_null.left(), MvSum.class); + assertThat(mvSum_null.field(), equalTo(NULL)); + var count_null = as(mul_null.right(), ReferenceAttribute.class); + assertThat(count_null.name(), equalTo("$$COUNT$s$0")); + + var count_agg = as(Alias.unwrap(agg.aggregates().get(0)), Count.class); + assertThat(count_agg.children().get(0), instanceOf(Literal.class)); + var w = as(Alias.unwrap(agg.groupings().get(0)), ReferenceAttribute.class); + assertThat(w.name(), equalTo("w")); + } + + private record AggOfLiteralTestCase( + String aggFunctionName, + Class substitution, + 
Function aggMultiValue + ) {}; + + private static List AGG_OF_CONST_CASES = List.of( + new AggOfLiteralTestCase("avg", MvAvg.class, ints -> ((double) Arrays.stream(ints).sum()) / ints.length), + new AggOfLiteralTestCase("min", MvMin.class, ints -> Arrays.stream(ints).min().getAsInt()), + new AggOfLiteralTestCase("max", MvMax.class, ints -> Arrays.stream(ints).max().getAsInt()) + ); + + /** + * Aggs of literals in case that the agg can be simply replaced by a corresponding mv-function; + * e.g. avg([1,2,3]) which is equivalent to mv_avg([1,2,3]). + * + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7]] + * \_Eval[[MVAVG([1, 2][INTEGER]) AS s, MVAVG(314.0[DOUBLE] / 100[INTEGER]) AS s_expr, MVAVG(null[NULL]) AS s_null]] + * \_LocalRelation[[{e}#21],[ConstantNullBlock[positions=1]]] + */ + public void testAggOfLiteral() { + for (AggOfLiteralTestCase testCase : AGG_OF_CONST_CASES) { + String query = LoggerMessageFormat.format(null, """ + from test + | stats s = {}([1,2]), + s_expr = {}(314.0/100), + s_null = {}(null) + | keep s, s_expr, s_null + """, testCase.aggFunctionName, testCase.aggFunctionName, testCase.aggFunctionName); + + var plan = plan(query, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var singleRowRelation = as(eval.child(), LocalRelation.class); + var singleRow = singleRowRelation.supplier().get(); + assertThat(singleRow.length, equalTo(1)); + assertThat(singleRow[0].getPositionCount(), equalTo(1)); + + var exprs = eval.fields(); + var s = as(exprs.get(0), Alias.class); + assertThat(s.child(), instanceOf(testCase.substitution)); + assertThat(s.child().fold(), equalTo(testCase.aggMultiValue.apply(new int[] { 1, 2 
}))); + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.child(), instanceOf(testCase.substitution)); + assertThat(s_expr.child().fold(), equalTo(3.14)); + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.child(), instanceOf(testCase.substitution)); + assertThat(s_null.child().fold(), equalTo(null)); + } + } + + /** + * Like {@link LogicalPlanOptimizerTests#testAggOfLiteral()} but with a grouping key. + * + * Expects after running the {@link LogicalPlanOptimizer#substitutions()}: + * + * Limit[1000[INTEGER]] + * \_EsqlProject[[s{r}#3, s_expr{r}#5, s_null{r}#7, emp_no{f}#13]] + * \_Project[[s{r}#3, s_expr{r}#5, s_null{r}#7, emp_no{f}#13]] + * \_Eval[[MVAVG([1, 2][INTEGER]) AS s, MVAVG(314.0[DOUBLE] / 100[INTEGER]) AS s_expr, MVAVG(null[NULL]) AS s_null]] + * \_Aggregate[[emp_no{f}#13],[emp_no{f}#13]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] + */ + public void testAggOfLiteralGrouped() { + for (AggOfLiteralTestCase testCase : AGG_OF_CONST_CASES) { + String query = LoggerMessageFormat.format(null, """ + from test + | stats s = {}([1,2]), + s_expr = {}(314.0/100), + s_null = {}(null) + by emp_no + | keep s, s_expr, s_null, emp_no + """, testCase.aggFunctionName, testCase.aggFunctionName, testCase.aggFunctionName); + + var plan = plan(query, SubstitutionOnlyOptimizer.INSTANCE); + + var limit = as(plan, Limit.class); + var esqlProject = as(limit.child(), EsqlProject.class); + var project = as(esqlProject.child(), Project.class); + var eval = as(project.child(), Eval.class); + var agg = as(eval.child(), Aggregate.class); + assertThat(agg.child(), instanceOf(EsRelation.class)); + + // Assert exprs + var exprs = eval.fields(); + + var s = as(exprs.get(0), Alias.class); + assertThat(s.child(), instanceOf(testCase.substitution)); + assertThat(s.child().fold(), equalTo(testCase.aggMultiValue.apply(new int[] { 1, 2 }))); + var s_expr = as(exprs.get(1), Alias.class); + assertThat(s_expr.child(), 
instanceOf(testCase.substitution)); + assertThat(s_expr.child().fold(), equalTo(3.14)); + var s_null = as(exprs.get(2), Alias.class); + assertThat(s_null.child(), instanceOf(testCase.substitution)); + assertThat(s_null.child().fold(), equalTo(null)); + + // Assert that the aggregate only does the grouping by emp_no + assertThat(Expressions.names(agg.groupings()), contains("emp_no")); + assertThat(agg.aggregates().size(), equalTo(1)); + } + } + public void testEmptyMappingIndex() { EsIndex empty = new EsIndex("empty_test", emptyMap(), emptySet()); IndexResolution getIndexResultAirports = IndexResolution.valid(empty); @@ -3455,9 +3641,13 @@ private LogicalPlan optimizedPlan(String query) { } private LogicalPlan plan(String query) { + return plan(query, logicalOptimizer); + } + + private LogicalPlan plan(String query, LogicalPlanOptimizer optimizer) { var analyzed = analyzer.analyze(parser.createStatement(query)); // System.out.println(analyzed); - var optimized = logicalOptimizer.optimize(analyzed); + var optimized = optimizer.optimize(analyzed); // System.out.println(optimized); return optimized; } From daf46b56e943aed8e9c545eb657105fae75d8e45 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 26 Mar 2024 10:25:12 +0100 Subject: [PATCH 170/214] Print out state of index if didn't recover from the snapshot (#106726) Add additional debug information about the index and its recovery state if the assertion for `recoveredFromSnapshotBytes` fails See #87568 --- .../recovery/SnapshotBasedIndexRecoveryIT.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index 1465911490f61..8951b91cb76a3 100644 --- 
a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -986,7 +986,11 @@ public void testRecoveryConcurrentlyWithIndexing() throws Exception { if (waitForSnapshotDownloadToStart) { // must complete using snapshots alone. RecoveryState recoveryState = getLatestPeerRecoveryStateForShard(indexName, 0); - assertThat(recoveryState.getIndex().recoveredFromSnapshotBytes(), equalTo(snapshotSizeForIndex)); + assertThat( + "Index " + recoveryState.getIndex() + " should be completely recovered from the snapshot", + recoveryState.getIndex().recoveredFromSnapshotBytes(), + equalTo(snapshotSizeForIndex) + ); } assertDocumentsAreEqual(indexName, numDocs.get()); From 2c38fa7f94315e8689a29d895d170afc3bfa1388 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Tue, 26 Mar 2024 13:56:58 +0100 Subject: [PATCH 171/214] knn qvb test disable id assertion (#106746) --- .../rest-api-spec/test/ml/search_knn_query_vector_builder.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml index 4cab2c7908748..869bba90345c4 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/search_knn_query_vector_builder.yml @@ -121,7 +121,6 @@ setup: model_id: text_embedding_model model_text: "the octopus comforter smells" - length: { hits.hits: 3 } - - match: { hits.hits.0._id: "0" } --- "nested kNN search with inner_hits size": From 84872e24800b7d243488db2aed54aec7a5e69d6a Mon Sep 17 00:00:00 2001 From: Mary Gouseti 
Date: Tue, 26 Mar 2024 15:20:40 +0200 Subject: [PATCH 172/214] Rename DataStreamGlobalRetentionTests (#106754) --- .../get/GetComponentTemplateResponseTests.java | 11 ++++------- .../cluster/metadata/ComponentTemplateTests.java | 2 +- .../metadata/ComposableIndexTemplateTests.java | 2 +- ...Tests.java => DataStreamGlobalRetentionTests.java} | 2 +- .../cluster/metadata/DataStreamLifecycleTests.java | 2 +- .../cluster/metadata/DataStreamTests.java | 2 +- 6 files changed, 9 insertions(+), 12 deletions(-) rename server/src/test/java/org/elasticsearch/cluster/metadata/{DataStreamGlobalRetentionSerializationTests.java => DataStreamGlobalRetentionTests.java} (96%) diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java index 2af4bf5016ad2..025f51b7df997 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateResponseTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComponentTemplateTests; -import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSerializationTests; +import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionTests; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.Strings; @@ -47,7 +47,7 @@ protected GetComponentTemplateAction.Response createTestInstance() { return new GetComponentTemplateAction.Response( randomBoolean() ? 
Map.of() : randomTemplates(), RolloverConfigurationTests.randomRolloverConditions(), - DataStreamGlobalRetentionSerializationTests.randomGlobalRetention() + DataStreamGlobalRetentionTests.randomGlobalRetention() ); } @@ -59,10 +59,7 @@ protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplat switch (randomInt(2)) { case 0 -> templates = templates == null ? randomTemplates() : null; case 1 -> rolloverConditions = randomValueOtherThan(rolloverConditions, RolloverConfigurationTests::randomRolloverConditions); - case 2 -> globalRetention = randomValueOtherThan( - globalRetention, - DataStreamGlobalRetentionSerializationTests::randomGlobalRetention - ); + case 2 -> globalRetention = randomValueOtherThan(globalRetention, DataStreamGlobalRetentionTests::randomGlobalRetention); } return new GetComponentTemplateAction.Response(templates, rolloverConditions, globalRetention); } @@ -88,7 +85,7 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws null, false ); - var globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + var globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); var rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); var response = new GetComponentTemplateAction.Response( Map.of(randomAlphaOfLength(10), template), diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java index 7efa624b49148..067a67ee025a1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java @@ -289,7 +289,7 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); 
RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java index 6485634f879ba..15b55b5f002bb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplateTests.java @@ -238,7 +238,7 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); template.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java 
b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java similarity index 96% rename from server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java rename to server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java index 5cd104f1f59b5..e65b4d41bbe02 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamGlobalRetentionTests.java @@ -18,7 +18,7 @@ import java.util.List; -public class DataStreamGlobalRetentionSerializationTests extends SimpleDiffableWireSerializationTestCase { +public class DataStreamGlobalRetentionTests extends SimpleDiffableWireSerializationTestCase { @Override protected ClusterState.Custom makeTestChanges(ClusterState.Custom testInstance) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java index fc650a5e65909..38b09f3690870 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamLifecycleTests.java @@ -112,7 +112,7 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); 
lifecycle.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); String serialized = Strings.toString(builder); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index a1a523ddb584d..9db7d1047e249 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -1697,7 +1697,7 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { builder.humanReadable(true); RolloverConfiguration rolloverConfiguration = RolloverConfigurationTests.randomRolloverConditions(); - DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionSerializationTests.randomGlobalRetention(); + DataStreamGlobalRetention globalRetention = DataStreamGlobalRetentionTests.randomGlobalRetention(); ToXContent.Params withEffectiveRetention = new ToXContent.MapParams(DataStreamLifecycle.INCLUDE_EFFECTIVE_RETENTION_PARAMS); dataStream.toXContent(builder, withEffectiveRetention, rolloverConfiguration, globalRetention); From f34f5d4bc98d736d16bb93adbd94a51b2c48749d Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Tue, 26 Mar 2024 09:44:16 -0400 Subject: [PATCH 173/214] [Transform] Auto retry Transform start (#106243) * [Transform] Auto retry Transform start Currently, unattended Transforms can fail to start due to failure to load the Config from its internal index. This usually happens when a Transform is created and immediately started by a system. The error looks like: ``` Failed to load transform configuration for transform [id] ``` Now, we will automatically retry the startup logic until the Config is ready. 
Some notes: - We cannot determine if a transform is unattended or not, so at this stage we will assume all transforms are unattended. - The persistent task running the transform will move into the `STARTED` state. Users can stop the persistent task and retry logic using the Transform's Stop API. - While retrying, the Transform will report `Yellow` health in the API and `degraded` in Kibana. The health message will include that the transform is automatically retrying and what error it had encountered. --- docs/changelog/106243.yaml | 5 + .../transforms/TransformConfigTests.java | 38 ++- .../transforms/TransformContext.java | 34 +++ .../transforms/TransformHealthChecker.java | 24 +- .../TransformPersistentTasksExecutor.java | 75 +++++- .../TransformRetryableStartUpListener.java | 102 ++++++++ .../transform/transforms/TransformTask.java | 2 +- .../transforms/TransformContextTests.java | 70 ++++- .../TransformHealthCheckerTests.java | 25 ++ ...TransformPersistentTasksExecutorTests.java | 179 +++++++++++-- ...ransformRetryableStartUpListenerTests.java | 239 ++++++++++++++++++ 11 files changed, 743 insertions(+), 50 deletions(-) create mode 100644 docs/changelog/106243.yaml create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java create mode 100644 x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java diff --git a/docs/changelog/106243.yaml b/docs/changelog/106243.yaml new file mode 100644 index 0000000000000..6b02e3f1699d4 --- /dev/null +++ b/docs/changelog/106243.yaml @@ -0,0 +1,5 @@ +pr: 106243 +summary: "[Transform] Auto retry Transform start" +area: "Transform" +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java 
index ba2cd0ba04312..f1c2de11496bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -123,6 +122,20 @@ public static TransformConfig randomTransformConfig(String id, TransformConfigVe return randomTransformConfig(id, version, pivotConfig, latestConfig); } + public static TransformConfig randomTransformConfig(String id, TimeValue frequency, TransformConfigVersion version) { + PivotConfig pivotConfig; + LatestConfig latestConfig; + if (randomBoolean()) { + pivotConfig = PivotConfigTests.randomPivotConfig(); + latestConfig = null; + } else { + pivotConfig = null; + latestConfig = LatestConfigTests.randomLatestConfig(); + } + + return randomTransformConfig(id, frequency, version, pivotConfig, latestConfig); + } + public static TransformConfig randomTransformConfigWithSettings(SettingsConfig settingsConfig) { PivotConfig pivotConfig; LatestConfig latestConfig; @@ -157,12 +170,28 @@ public static TransformConfig randomTransformConfig( TransformConfigVersion version, PivotConfig pivotConfig, LatestConfig latestConfig + ) { + return randomTransformConfig( + id, + randomBoolean() ? 
null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), + version, + pivotConfig, + latestConfig + ); + } + + public static TransformConfig randomTransformConfig( + String id, + TimeValue frequency, + TransformConfigVersion version, + PivotConfig pivotConfig, + LatestConfig latestConfig ) { return new TransformConfig( id, randomSourceConfig(), randomDestConfig(), - randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), + frequency, randomBoolean() ? null : randomSyncConfig(), randomHeaders(), pivotConfig, @@ -281,10 +310,7 @@ protected ToXContent.Params getToXContentParams() { } private static Map randomHeaders() { - Map headers = Maps.newMapWithExpectedSize(1); - headers.put("key", "value"); - - return headers; + return Map.of("key", "value"); } public void testDefaultMatchAll() throws IOException { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java index 7fdabda6189a9..6119f446e8dc4 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformContext.java @@ -39,6 +39,9 @@ public interface Listener { private final AtomicInteger statePersistenceFailureCount = new AtomicInteger(); private final AtomicReference lastStatePersistenceFailure = new AtomicReference<>(); private volatile Instant lastStatePersistenceFailureStartTime; + private final AtomicInteger startUpFailureCount = new AtomicInteger(); + private final AtomicReference lastStartUpFailure = new AtomicReference<>(); + private volatile Instant startUpFailureTime; private volatile Instant changesLastDetectedAt; private volatile Instant lastSearchTime; private volatile boolean shouldStopAtCheckpoint = false; @@ -214,6 +217,37 @@ Instant 
getLastStatePersistenceFailureStartTime() { return lastStatePersistenceFailureStartTime; } + void resetStartUpFailureCount() { + startUpFailureCount.set(0); + lastStartUpFailure.set(null); + startUpFailureTime = null; + } + + int getStartUpFailureCount() { + return startUpFailureCount.get(); + } + + Throwable getStartUpFailure() { + return lastStartUpFailure.get(); + } + + int incrementAndGetStartUpFailureCount(Throwable failure) { + lastStartUpFailure.set(failure); + int newFailureCount = startUpFailureCount.incrementAndGet(); + if (newFailureCount == 1) { + startUpFailureTime = Instant.now(); + } + return newFailureCount; + } + + Instant getStartUpFailureTime() { + return startUpFailureTime; + } + + boolean doesNotHaveFailures() { + return getFailureCount() == 0 && getStatePersistenceFailureCount() == 0 && getStartUpFailureCount() == 0; + } + void shutdown() { taskListener.shutdown(); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java index 86d8ce4a6173c..24c5d45a38f75 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformHealthChecker.java @@ -38,7 +38,8 @@ public enum IssueType { PRIVILEGES_CHECK_FAILED("Privileges check failed"), TRANSFORM_TASK_FAILED("Transform task state is [failed]"), TRANSFORM_INDEXER_FAILED("Transform indexer failed"), - TRANSFORM_INTERNAL_STATE_UPDATE_FAILED("Task encountered failures updating internal state"); + TRANSFORM_INTERNAL_STATE_UPDATE_FAILED("Task encountered failures updating internal state"), + TRANSFORM_STARTUP_FAILED("Transform task is automatically retrying its startup process"); private final String issue; @@ -88,8 +89,7 @@ public static TransformHealth 
checkTransform(TransformTask transformTask) { public static TransformHealth checkTransform(TransformTask transformTask, @Nullable AuthorizationState authState) { // quick check if (TransformTaskState.FAILED.equals(transformTask.getState().getTaskState()) == false - && transformTask.getContext().getFailureCount() == 0 - && transformTask.getContext().getStatePersistenceFailureCount() == 0 + && transformTask.getContext().doesNotHaveFailures() && AuthorizationState.isNullOrGreen(authState)) { return TransformHealth.GREEN; } @@ -145,6 +145,24 @@ public static TransformHealth checkTransform(TransformTask transformTask, @Nulla ); } + if (transformContext.getStartUpFailureCount() != 0) { + if (HealthStatus.RED.equals(maxStatus) == false) { + maxStatus = HealthStatus.YELLOW; + } + + var lastFailure = transformContext.getStartUpFailure(); + var lastFailureMessage = lastFailure instanceof ElasticsearchException elasticsearchException + ? elasticsearchException.getDetailedMessage() + : lastFailure.getMessage(); + issues.add( + IssueType.TRANSFORM_STARTUP_FAILED.newIssue( + lastFailureMessage, + transformContext.getStartUpFailureCount(), + transformContext.getStartUpFailureTime() + ) + ); + } + return new TransformHealth(maxStatus, issues); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index ae9678893df9a..f18414e3aaead 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformStoredDoc; import 
org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformExtension; @@ -203,6 +204,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa final SetOnce stateHolder = new SetOnce<>(); + // <7> log the start result ActionListener startTaskListener = ActionListener.wrap( response -> logger.info("[{}] successfully completed and scheduled task in node operation", transformId), failure -> { @@ -348,21 +350,18 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa }); // <2> Get the transform config - ActionListener templateCheckListener = ActionListener.wrap( - aVoid -> transformServices.getConfigManager().getTransformConfiguration(transformId, getTransformConfigListener), - error -> { - Throwable cause = ExceptionsHelper.unwrapCause(error); - String msg = "Failed to create internal index mappings"; - markAsFailed(buildTask, error, msg + "[" + cause + "]"); - } - ); + var templateCheckListener = getTransformConfig(buildTask, params, getTransformConfigListener); // <1> Check the latest internal index (IMPORTANT: according to _this_ node, which might be newer than master) is installed TransformInternalIndex.createLatestVersionedIndexIfRequired( clusterService, parentTaskClient, transformExtension.getTransformInternalIndexAdditionalSettings(), - templateCheckListener + templateCheckListener.delegateResponse((l, e) -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + String msg = "Failed to create internal index mappings"; + markAsFailed(buildTask, e, msg + "[" + cause + "]"); + }) ); } @@ -401,6 +400,64 @@ private static void markAsFailed(TransformTask task, Throwable exception, String } } + private ActionListener 
getTransformConfig( + TransformTask task, + TransformTaskParams params, + ActionListener listener + ) { + return ActionListener.running(() -> { + var transformId = params.getId(); + // if this call fails for the first time, we are going to retry it indefinitely + // register the retry using the TransformScheduler, when the call eventually succeeds, deregister it before returning + var scheduler = transformServices.getScheduler(); + scheduler.registerTransform( + params, + new TransformRetryableStartUpListener<>( + transformId, + l -> transformServices.getConfigManager().getTransformConfiguration(transformId, l), + ActionListener.runBefore(listener, () -> scheduler.deregisterTransform(transformId)), + retryListener(task), + () -> true, // because we can't determine if this is an unattended transform yet, retry indefinitely + task.getContext() + ) + ); + }); + } + + /** + * This listener is always called after the first execution of a {@link TransformRetryableStartUpListener}. + * + * When the result is true, then the first call has failed and will retry. Save the state as Started and unblock the network thread, + * notifying the user with a 200 OK (acknowledged). + * + * When the result is false, then the first call has succeeded, and no further action is required for this listener. 
+ */ + private ActionListener retryListener(TransformTask task) { + return ActionListener.wrap(isRetrying -> { + if (isRetrying) { + var oldState = task.getState(); + var newState = new TransformState( + TransformTaskState.STARTED, + oldState.getIndexerState(), + oldState.getPosition(), + oldState.getCheckpoint(), + "Retrying transform start.", + oldState.getProgress(), + oldState.getNode(), + oldState.shouldStopAtNextCheckpoint(), + oldState.getAuthState() + ); + task.persistStateToClusterState( + newState, + ActionListener.wrap( + rr -> logger.debug("[{}] marked as retrying in TransformState.", task.getTransformId()), + ee -> logger.atWarn().withThrowable(ee).log("[{}] failed to persist state.", task.getTransformId()) + ) + ); + } + }, e -> markAsFailed(task, e, "Failed to initiate retries for Transform.")); + } + private void startTask( TransformTask buildTask, ClientTransformIndexerBuilder indexerBuilder, diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java new file mode 100644 index 0000000000000..17548fd8d427f --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListener.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.transforms; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import java.util.function.Supplier; + +class TransformRetryableStartUpListener implements TransformScheduler.Listener { + private final String transformId; + private final Consumer> action; + private final ActionListener actionListener; + private final ActionListener retryScheduledListener; + private final Supplier shouldRetry; + private final TransformContext context; + private final AtomicBoolean isFirstRun; + private final AtomicBoolean isRunning; + + /** + * @param transformId the transform associated with this listener. All events to this listener must be for the same transformId. + * @param action the action this listener will take. When the TransformScheduler invokes {@link #triggered(TransformScheduler.Event)}, + * the call is forwarded to this action. + * @param actionListener actionListener will be notified via #onResponse when the action succeeds or via #onFailure when retries have + * stopped. If the Transform Stop API deregisters this class from the Scheduler, this actionListener will *not* be + * invoked. + * @param retryScheduledListener retryScheduledListener will be notified after the first call. If true, another thread has started the + * retry process. If false, the original call was successful, and no retries will happen. + * @param shouldRetry allows an external entity to gracefully stop these retries, invoking the actionListener's #onFailure method. + * Note that external entities are still required to deregister this listener from the Scheduler. + * @param context the transform's context object. This listener will update the StartUpFailureCount information in the context as it + * encounters errors and retries. 
+ */ + TransformRetryableStartUpListener( + String transformId, + Consumer> action, + ActionListener actionListener, + ActionListener retryScheduledListener, + Supplier shouldRetry, + TransformContext context + ) { + this.transformId = transformId; + this.action = action; + this.actionListener = actionListener; + this.retryScheduledListener = retryScheduledListener; + this.shouldRetry = shouldRetry; + this.context = context; + this.isFirstRun = new AtomicBoolean(true); + this.isRunning = new AtomicBoolean(true); + } + + @Override + public void triggered(TransformScheduler.Event event) { + if (isRunning.get() && transformId.equals(event.transformId())) { + action.accept(ActionListener.wrap(this::actionSucceeded, this::actionFailed)); + } + } + + private void markDone() { + if (isRunning.compareAndSet(true, false)) { + synchronized (context) { + context.resetStartUpFailureCount(); + } + } + } + + private void actionSucceeded(Response r) { + maybeNotifyRetryListener(false); + markDone(); + actionListener.onResponse(r); + } + + private void maybeNotifyRetryListener(boolean response) { + if (isFirstRun.compareAndSet(true, false)) { + retryScheduledListener.onResponse(response); + } + } + + private void actionFailed(Exception e) { + if (shouldRetry.get()) { + maybeNotifyRetryListener(true); + recordError(e); + } else { + maybeNotifyRetryListener(false); + markDone(); + actionListener.onFailure(e); + } + } + + private void recordError(Exception e) { + synchronized (context) { + context.incrementAndGetStartUpFailureCount(e); + } + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index ac81579e8dd71..dbfc30a38f4c3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -71,7 +71,7 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS private final SetOnce indexer = new SetOnce<>(); @SuppressWarnings("this-escape") - public TransformTask( + TransformTask( long id, String type, String action, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java index 86193ef511618..d8e505ad16a49 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformContextTests.java @@ -21,9 +21,11 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions; public class TransformContextTests extends ESTestCase { @@ -41,19 +43,81 @@ public void verifyNoMoreInteractionsOnMocks() { } public void testFailureCount() { - TransformContext context = new TransformContext(null, null, 0, listener); - assertThat(context.incrementAndGetFailureCount(new RuntimeException("some_exception")), is(equalTo(1))); + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetFailureCount(someException), is(equalTo(1))); assertThat(context.getFailureCount(), is(equalTo(1))); - assertThat(context.incrementAndGetFailureCount(new 
IllegalArgumentException("some_other_exception")), is(equalTo(2))); + assertThat(context.getLastFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetFailureCount(someOtherException), is(equalTo(2))); assertThat(context.getFailureCount(), is(equalTo(2))); + assertThat(context.getLastFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + context.resetReasonAndFailureCounter(); assertThat(context.getFailureCount(), is(equalTo(0))); assertThat(context.getLastFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); // Verify that the listener is notified every time the failure count is incremented or reset verify(listener, times(3)).failureCountChanged(); } + private Throwable someException() { + return new RuntimeException("some_exception"); + } + + private Throwable someOtherException() { + return new IllegalArgumentException("some_other_exception"); + } + + public void testStatePersistenceFailureCount() { + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetStatePersistenceFailureCount(someException), is(equalTo(1))); + assertThat(context.getStatePersistenceFailureCount(), is(equalTo(1))); + assertThat(context.getLastStatePersistenceFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetStatePersistenceFailureCount(someOtherException), is(equalTo(2))); + assertThat(context.getStatePersistenceFailureCount(), is(equalTo(2))); + assertThat(context.getLastStatePersistenceFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + + context.resetStatePersistenceFailureCount(); + assertThat(context.getStatePersistenceFailureCount(), 
is(equalTo(0))); + assertThat(context.getLastStatePersistenceFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); + verifyNoInteractions(listener); + } + + public void testStartUpFailureCount() { + var context = new TransformContext(null, null, 0, listener); + + var someException = someException(); + assertThat(context.incrementAndGetStartUpFailureCount(someException), is(equalTo(1))); + assertThat(context.getStartUpFailureCount(), is(equalTo(1))); + assertThat(context.getStartUpFailure(), is(sameInstance(someException))); + assertFalse(context.doesNotHaveFailures()); + + var someOtherException = someOtherException(); + assertThat(context.incrementAndGetStartUpFailureCount(someOtherException), is(equalTo(2))); + assertThat(context.getStartUpFailureCount(), is(equalTo(2))); + assertThat(context.getStartUpFailure(), is(sameInstance(someOtherException))); + assertFalse(context.doesNotHaveFailures()); + + context.resetStartUpFailureCount(); + assertThat(context.getStartUpFailureCount(), is(equalTo(0))); + assertThat(context.getStartUpFailure(), is(nullValue())); + assertTrue(context.doesNotHaveFailures()); + verifyNoInteractions(listener); + } + public void testCheckpoint() { TransformContext context = new TransformContext(null, null, 13, listener); assertThat(context.getCheckpoint(), is(equalTo(13L))); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java index 3b4604caca5cd..e52428bc94c13 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformHealthCheckerTests.java @@ -17,6 +17,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.stream.IntStream; import 
static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -101,6 +102,30 @@ public void testStatusSwitchingAndMultipleFailures() { assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); } + public void testStartUpFailures() { + var task = mock(TransformTask.class); + var context = createTestContext(); + var now = getNow(); + + withIdStateAndContext(task, randomAlphaOfLength(10), context); + assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); + + context.incrementAndGetStartUpFailureCount(new ElasticsearchException("failed to persist")); + + var health = TransformHealthChecker.checkTransform(task); + assertThat(health.getStatus(), equalTo(HealthStatus.YELLOW)); + assertEquals(1, health.getIssues().size()); + assertThat(health.getIssues().get(0).getIssue(), equalTo("Transform task is automatically retrying its startup process")); + assertThat(health.getIssues().get(0).getFirstOccurrence(), greaterThanOrEqualTo(now)); + assertThat(health.getIssues().get(0).getFirstOccurrence(), lessThan(Instant.MAX)); + + IntStream.range(0, 10).forEach(i -> context.incrementAndGetStartUpFailureCount(new ElasticsearchException("failed to persist"))); + assertThat("Start up failures should always be yellow regardless of count", health.getStatus(), equalTo(HealthStatus.YELLOW)); + + context.resetStartUpFailureCount(); + assertThat(TransformHealthChecker.checkTransform(task), equalTo(TransformHealth.GREEN)); + } + private TransformContext createTestContext() { return new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index b927a248faf31..b5192535e911a 100644 --- 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.transform.transforms; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -24,6 +25,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; @@ -31,33 +33,74 @@ import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.Assignment; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; +import org.elasticsearch.xpack.core.transform.transforms.AuthorizationState; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; +import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; +import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import 
org.elasticsearch.xpack.core.transform.transforms.persistence.TransformInternalIndexConstants; import org.elasticsearch.xpack.transform.DefaultTransformExtension; import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.TransformServices; import org.elasticsearch.xpack.transform.checkpoint.TransformCheckpointService; import org.elasticsearch.xpack.transform.notifications.TransformAuditor; -import org.elasticsearch.xpack.transform.persistence.IndexBasedTransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.InMemoryTransformConfigManager; +import org.elasticsearch.xpack.transform.persistence.TransformConfigManager; import org.elasticsearch.xpack.transform.persistence.TransformInternalIndexTests; import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; +import org.junit.AfterClass; +import org.junit.BeforeClass; import java.time.Clock; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class TransformPersistentTasksExecutorTests extends ESTestCase { + private static ThreadPool threadPool; + + @BeforeClass + public static void setUpThreadPool() { + threadPool = new TestThreadPool(TransformPersistentTasksExecutorTests.class.getSimpleName()) { + @Override + public ExecutorService executor(String name) { + return EsExecutors.DIRECT_EXECUTOR_SERVICE; + } + + 
@Override + public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor name) { + command.run(); + return null; + } + }; + } + + @AfterClass + public static void tearDownThreadPool() { + terminate(threadPool); + } public void testNodeVersionAssignment() { DiscoveryNodes.Builder nodes = buildNodes(false, true, true, true, true); @@ -262,6 +305,88 @@ public void testVerifyIndicesPrimaryShardsAreActive() { assertEquals(indexToRemove, result.get(0)); } + public void testNodeOperation() { + var transformsConfigManager = new InMemoryTransformConfigManager(); + var transformScheduler = new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO); + var taskExecutor = buildTaskExecutor(transformServices(transformsConfigManager, transformScheduler)); + + var transformId = "testNodeOperation"; + var params = mockTaskParams(transformId); + + putTransformConfiguration(transformsConfigManager, transformId); + var task = mockTransformTask(); + taskExecutor.nodeOperation(task, params, mock()); + + verify(task).start(isNull(), any()); + } + + private void putTransformConfiguration(TransformConfigManager configManager, String transformId) { + configManager.putTransformConfiguration( + TransformConfigTests.randomTransformConfig(transformId, TimeValue.timeValueMillis(1), TransformConfigVersion.CURRENT), + ActionListener.noop().delegateResponse((l, e) -> fail(e)) + ); + } + + public void testNodeOperationStartupRetry() throws Exception { + var failFirstCall = new AtomicBoolean(true); + var transformsConfigManager = new InMemoryTransformConfigManager() { + @Override + public void getTransformConfiguration(String transformId, ActionListener resultListener) { + if (failFirstCall.compareAndSet(true, false)) { + resultListener.onFailure(new IllegalStateException("Failing first call.")); + } else { + super.getTransformConfiguration(transformId, resultListener); + } + } + }; + + var transformScheduler = new TransformScheduler(Clock.systemUTC(), 
threadPool, fastRetry(), TimeValue.ZERO); + var taskExecutor = buildTaskExecutor(transformServices(transformsConfigManager, transformScheduler)); + + var transformId = "testNodeOperationStartupRetry"; + var params = mockTaskParams(transformId); + putTransformConfiguration(transformsConfigManager, transformId); + + var task = mockTransformTask(); + taskExecutor.nodeOperation(task, params, mock()); + + // skip waiting for the scheduler to run the task a second time and just rerun it now + transformScheduler.scheduleNow(transformId); + + // verify the retry listener set the state to TransformTaskState.STARTED + IndexerState.STOPPED + verify(task).persistStateToClusterState(argThat(state -> { + assertThat(TransformTaskState.STARTED, equalTo(state.getTaskState())); + assertThat(IndexerState.STOPPED, equalTo(state.getIndexerState())); + return true; + }), any()); + verify(task).start(isNull(), any()); + } + + private Settings fastRetry() { + // must be >= [1s] + return Settings.builder().put(Transform.SCHEDULER_FREQUENCY.getKey(), TimeValue.timeValueSeconds(1)).build(); + } + + private TransformTaskParams mockTaskParams(String transformId) { + var params = mock(TransformTaskParams.class); + when(params.getId()).thenReturn(transformId); + when(params.getFrequency()).thenReturn(TimeValue.timeValueSeconds(1)); + return params; + } + + private TransformTask mockTransformTask() { + var task = mock(TransformTask.class); + when(task.setAuthState(any(AuthorizationState.class))).thenReturn(task); + when(task.setNumFailureRetries(anyInt())).thenReturn(task); + when(task.getParentTaskId()).thenReturn(TaskId.EMPTY_TASK_ID); + when(task.getContext()).thenReturn(mock()); + doAnswer(a -> fail(a.getArgument(0, Throwable.class))).when(task).fail(any(Throwable.class), any(String.class), any()); + when(task.getState()).thenReturn( + new TransformState(TransformTaskState.STOPPED, IndexerState.STOPPED, null, 0, null, null, null, false, null) + ); + return task; + } + private void 
addIndices(Metadata.Builder metadata, RoutingTable.Builder routingTable) { List indices = new ArrayList<>(); indices.add(TransformInternalIndexConstants.AUDIT_INDEX); @@ -415,23 +540,20 @@ private ClusterState buildClusterState(DiscoveryNodes.Builder nodes) { csBuilder.metadata(metadata); return csBuilder.build(); - } private TransformPersistentTasksExecutor buildTaskExecutor() { - ClusterService clusterService = mock(ClusterService.class); - Client client = mock(Client.class); - TransformAuditor mockAuditor = mock(TransformAuditor.class); - IndexBasedTransformConfigManager transformsConfigManager = new IndexBasedTransformConfigManager( - clusterService, - TestIndexNameExpressionResolver.newInstance(), - client, - xContentRegistry() + var transformServices = transformServices( + new InMemoryTransformConfigManager(), + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ); - Clock clock = Clock.systemUTC(); - ThreadPool threadPool = mock(ThreadPool.class); - TransformCheckpointService transformCheckpointService = new TransformCheckpointService( - clock, + return buildTaskExecutor(transformServices); + } + + private TransformServices transformServices(TransformConfigManager configManager, TransformScheduler scheduler) { + var mockAuditor = mock(TransformAuditor.class); + var transformCheckpointService = new TransformCheckpointService( + Clock.systemUTC(), Settings.EMPTY, new ClusterService( Settings.EMPTY, @@ -439,28 +561,29 @@ private TransformPersistentTasksExecutor buildTaskExecutor() { null, (TaskManager) null ), - transformsConfigManager, + configManager, mockAuditor ); - TransformServices transformServices = new TransformServices( - transformsConfigManager, - transformCheckpointService, - mockAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) - ); - - ClusterSettings cSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(Transform.NUM_FAILURE_RETRIES_SETTING)); - 
when(clusterService.getClusterSettings()).thenReturn(cSettings); - when(clusterService.state()).thenReturn(TransformInternalIndexTests.randomTransformClusterState()); + return new TransformServices(configManager, transformCheckpointService, mockAuditor, scheduler); + } + private TransformPersistentTasksExecutor buildTaskExecutor(TransformServices transformServices) { return new TransformPersistentTasksExecutor( - client, + mock(Client.class), transformServices, threadPool, - clusterService, + clusterService(), Settings.EMPTY, new DefaultTransformExtension(), TestIndexNameExpressionResolver.newInstance() ); } + + private ClusterService clusterService() { + var clusterService = mock(ClusterService.class); + var cSettings = new ClusterSettings(Settings.EMPTY, Set.of(Transform.NUM_FAILURE_RETRIES_SETTING)); + when(clusterService.getClusterSettings()).thenReturn(cSettings); + when(clusterService.state()).thenReturn(TransformInternalIndexTests.randomTransformClusterState()); + return clusterService; + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java new file mode 100644 index 0000000000000..1a2bbfd434455 --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformRetryableStartUpListenerTests.java @@ -0,0 +1,239 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.transforms; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.only; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; + +public class TransformRetryableStartUpListenerTests extends ESTestCase { + /** + * When the action succeeds on the first try + * Then we invoked the retryListener with "false" and then invoked the actionListener's onResponse. + */ + public void testFirstRunPasses() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + immediatelyReturn(), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 success and no retries + assertEquals("Response Listener should only be called once.", 1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertFalse("Retries should not be scheduled.", retryResult.get()); + verify(context, only()).resetStartUpFailureCount(); + } + + private Consumer> immediatelyReturn() { + return l -> l.onResponse(null); + } + + private ActionListener responseListener(AtomicInteger result) { + return ActionListener.wrap(r -> { + if (result.compareAndSet(0, 1) == false) { + fail("Response Listener should only be called at most once for every test."); 
+ } + }, e -> { + if (result.compareAndSet(0, -1) == false) { + fail("Response Listener should only be called at most once for every test."); + } + }); + } + + private ActionListener retryListener(AtomicReference result) { + return ActionListener.wrap(result::set, e -> fail("Retry Listener is never expected to fail.")); + } + + private void callThreeTimes(String transformId, TransformRetryableStartUpListener listener) { + listener.triggered(event(transformId)); + listener.triggered(event(transformId)); + listener.triggered(event(transformId)); + } + + private TransformScheduler.Event event(String transformId) { + return new TransformScheduler.Event(transformId, System.currentTimeMillis(), System.currentTimeMillis()); + } + + /** + * When the action fails once then succeeds on the second try + * Then we invoked the retryListener with "true" and then invoked the actionListener's onResponse. + */ + public void testFirstRunFails() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + failOnceThen(immediatelyReturn()), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 retry and 1 success + assertEquals("Response Listener should only be called once.", 1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, times(1)).resetStartUpFailureCount(); + } + + private Consumer> failOnceThen(Consumer> followup) { + var firstRun = new AtomicBoolean(true); + return l -> { + if (firstRun.compareAndSet(true, false)) { + l.onFailure(new IllegalStateException("first call fails")); + } else { + 
followup.accept(l); + } + }; + } + + /** + * When the TransformRetryableStartUpListener is never invoked + * Then there should be no failures to report + */ + public void testUnusedRetryableIsNotReported() { + var context = mock(TransformContext.class); + + new TransformRetryableStartUpListener<>( + "transformId", + failOnceThen(immediatelyReturn()), + responseListener(), + retryListener(), + () -> true, + context + ); + + verifyNoInteractions(context); + } + + private ActionListener retryListener() { + return retryListener(new AtomicReference<>()); + } + + private ActionListener responseListener() { + return responseListener(new AtomicInteger()); + } + + /** + * Given one transformId + * When we receive an event for another transformId + * Then we should not take any action + */ + public void testWrongTransformIdIsIgnored() { + var correctTransformId = "transformId"; + var incorrectTransformId = "someOtherTransformId"; + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + correctTransformId, + failOnceThen(immediatelyReturn()), + responseListener(responseResult), + retryListener(retryResult), + () -> true, + context + ); + + listener.triggered(event(incorrectTransformId)); + + assertEquals("Response Listener should never be called once.", 0, responseResult.get()); + assertNull("Retry Listener should not be called.", retryResult.get()); + verifyNoInteractions(context); + } + + /** + * Given an action that always fails + * When shouldRetry returns true and then false + * Then we should call the actionListener's onFailure handler + */ + public void testCancelRetries() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + var runTwice = new AtomicBoolean(true); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + 
alwaysFail(), + responseListener(responseResult), + retryListener(retryResult), + () -> runTwice.compareAndSet(true, false), + context + ); + + callThreeTimes("transformId", listener); + + // assert only 1 retry and 1 failure + assertEquals("Response Listener should only be called once.", -1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertTrue("Retries should be scheduled.", retryResult.get()); + verify(context, times(1)).incrementAndGetStartUpFailureCount(any(IllegalStateException.class)); + verify(context, times(1)).resetStartUpFailureCount(); + } + + private Consumer> alwaysFail() { + return l -> l.onFailure(new IllegalStateException("always fail")); + } + + /** + * Given an action that always fails + * When shouldRetry returns false + * Then we should call the actionListener's onFailure handler and the retryListener with "false" + */ + public void testCancelRetryImmediately() { + var retryResult = new AtomicReference(); + var responseResult = new AtomicInteger(0); + var context = mock(TransformContext.class); + + var listener = new TransformRetryableStartUpListener<>( + "transformId", + alwaysFail(), + responseListener(responseResult), + retryListener(retryResult), + () -> false, + context + ); + + callThreeTimes("transformId", listener); + + // assert no retries and 1 failure + assertEquals("Response Listener should only be called once.", -1, responseResult.get()); + assertNotNull("Retry Listener should be called.", retryResult.get()); + assertFalse("Retries should not be scheduled.", retryResult.get()); + verify(context, only()).resetStartUpFailureCount(); + } +} From 16888016048d891befdc026d783f2ca70738154c Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 26 Mar 2024 15:18:22 +0100 Subject: [PATCH 174/214] InternalMultiTermsTests#testReduceWithDoublePromotion should reduce for final (#106750) --- .../analytics/multiterms/InternalMultiTermsTests.java | 11 +++++++---- 1 file changed, 7 
insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java index 76d8130f954de..be020f74eafff 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTermsTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xpack.analytics.AnalyticsPlugin; @@ -358,18 +359,20 @@ public void testReduceWithDoublePromotion() { keyConverters2, null ); - AggregationReduceContext context = new AggregationReduceContext.ForPartial( + AggregationReduceContext context = new AggregationReduceContext.ForFinal( bigArrays, mockScriptService, () -> false, - mock(AggregationBuilder.class) + mock(AggregationBuilder.class), + i -> {}, + PipelineAggregator.PipelineTree.EMPTY ); InternalMultiTerms result = (InternalMultiTerms) InternalAggregationTestCase.reduce(List.of(terms1, terms2), context); assertThat(result.buckets, hasSize(3)); - assertThat(result.buckets.get(0).getKeyAsString(), equalTo("4|9.223372036854776E18|4.0")); + assertThat(result.buckets.get(0).getKeyAsString(), equalTo("4|9.223372036854776E18|1.0")); assertThat(result.buckets.get(0).getDocCount(), equalTo(3L)); - assertThat(result.buckets.get(1).getKeyAsString(), equalTo("4|9.223372036854776E18|1.0")); + assertThat(result.buckets.get(1).getKeyAsString(), 
equalTo("4|9.223372036854776E18|4.0")); assertThat(result.buckets.get(1).getDocCount(), equalTo(3L)); assertThat(result.buckets.get(2).getKeyAsString(), equalTo("3|9.223372036854776E18|3.0")); assertThat(result.buckets.get(2).getDocCount(), equalTo(2L)); From 58ed0936773fc20aa2e9d9e57ebe1ca1c8b049ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 26 Mar 2024 15:21:20 +0100 Subject: [PATCH 175/214] [Transform] Raise loglevel of events related to transform lifecycle from DEBUG to INFO (#106602) --- docs/changelog/106602.yaml | 5 +++++ .../transform/action/TransportDeleteTransformAction.java | 2 +- .../xpack/transform/action/TransportPutTransformAction.java | 2 +- .../transform/action/TransportResetTransformAction.java | 2 +- .../transform/action/TransportUpdateTransformAction.java | 2 +- .../transform/action/TransportUpgradeTransformsAction.java | 2 +- 6 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/106602.yaml diff --git a/docs/changelog/106602.yaml b/docs/changelog/106602.yaml new file mode 100644 index 0000000000000..972d7b5d163d3 --- /dev/null +++ b/docs/changelog/106602.yaml @@ -0,0 +1,5 @@ +pr: 106602 +summary: Raise loglevel of events related to transform lifecycle from DEBUG to INFO +area: Transform +type: enhancement +issues: [] diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java index d96ba88faff9a..51379b81d7e9d 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportDeleteTransformAction.java @@ -95,7 +95,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // <3> Delete transform config ActionListener 
deleteDestIndexListener = ActionListener.wrap( unusedAcknowledgedResponse -> transformConfigManager.deleteTransform(request.getId(), ActionListener.wrap(r -> { - logger.debug("[{}] deleted transform", request.getId()); + logger.info("[{}] deleted transform", request.getId()); auditor.info(request.getId(), "Deleted transform."); listener.onResponse(AcknowledgedResponse.of(r)); }, listener::onFailure)), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 8a82880f4d9a3..df36a850a3b0a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -168,7 +168,7 @@ private void putTransform(Request request, ActionListener var config = request.getConfig(); transformConfigManager.putTransformConfiguration(config, listener.delegateFailureAndWrap((l, unused) -> { var transformId = config.getId(); - logger.debug("[{}] created transform", transformId); + logger.info("[{}] created transform", transformId); auditor.info(transformId, "Created transform."); var validationFunc = FunctionFactory.create(config); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java index 87f24ae7c2bc8..6d0e3213d67fc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportResetTransformAction.java @@ -110,7 +110,7 @@ protected void masterOperation(Task task, Request request, 
ClusterState state, A // <4> Reset transform ActionListener updateTransformListener = ActionListener.wrap( unusedUpdateResult -> transformConfigManager.resetTransform(request.getId(), ActionListener.wrap(resetResponse -> { - logger.debug("[{}] reset transform", request.getId()); + logger.info("[{}] reset transform", request.getId()); auditor.info(request.getId(), "Reset transform."); listener.onResponse(AcknowledgedResponse.of(resetResponse)); }, listener::onFailure)), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index b35566c6467c4..3fb271aeb1535 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -154,7 +154,7 @@ protected void doExecute(Task task, Request request, ActionListener li TransformConfig updatedConfig = updateResult.getConfig(); AuthorizationState authState = updateResult.getAuthState(); auditor.info(updatedConfig.getId(), "Updated transform."); - logger.debug("[{}] Updated transform [{}]", updatedConfig.getId(), updateResult.getStatus()); + logger.info("[{}] Updated transform [{}]", updatedConfig.getId(), updateResult.getStatus()); checkTransformConfigAndLogWarnings(updatedConfig); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java index 8dd7b541b4e28..592b7b423c053 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpgradeTransformsAction.java @@ -199,7 +199,7 @@ private void recursiveUpdate( updateOneTransform(next, dryRun, timeout, ActionListener.wrap(updateResponse -> { if (UpdateResult.Status.DELETED.equals(updateResponse.getStatus()) == false) { auditor.info(next, "Updated transform."); - logger.debug("[{}] Updated transform [{}]", next, updateResponse.getStatus()); + logger.info("[{}] Updated transform [{}]", next, updateResponse.getStatus()); updatesByStatus.compute(updateResponse.getStatus(), (k, v) -> (v == null) ? 1 : v + 1L); } if (transformsToUpgrade.isEmpty() == false) { From ceb2701719f4a976ea92113b9fcc752881ac2679 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 26 Mar 2024 15:31:38 +0100 Subject: [PATCH 176/214] Ignore repository-hdfs integ tests in fips mode (#106762) Fixes https://github.com/elastic/elasticsearch/issues/106757 --- plugins/repository-hdfs/build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index beaf8723df4d5..49fc88a15f7d3 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -7,6 +7,7 @@ */ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -81,6 +82,7 @@ tasks.named("dependencyLicenses").configure { tasks.withType(RestIntegTestTask).configureEach { usesDefaultDistribution() + BuildParams.withFipsEnabledOnly(it) jvmArgs '--add-exports', 'java.security.jgss/sun.security.krb5=ALL-UNNAMED' } From 08d7542a31bea6d2965a50e8cfa757acfb177500 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 26 Mar 2024 16:35:36 +0100 Subject: [PATCH 177/214] Fix concurrency bug in AbstractStringScriptFieldAutomatonQuery (#106678) Back when we introduced 
queries against runtime fields, Elasticsearch did not support inter-segment concurrency yet. At the time, it was fine to assume that segments will be searched sequentially. AbstractStringScriptFieldAutomatonQuery used to have a BytesRefBuilder instance shared across the segments, which gets re-initialized when each segment starts its work. This is no longer possible with inter-segment concurrency. Closes #105911 --- docs/changelog/106678.yaml | 6 ++ .../AbstractBooleanScriptFieldQuery.java | 2 +- .../AbstractDoubleScriptFieldQuery.java | 2 +- .../AbstractGeoPointScriptFieldQuery.java | 2 +- .../runtime/AbstractIpScriptFieldQuery.java | 2 +- .../runtime/AbstractLongScriptFieldQuery.java | 2 +- .../runtime/AbstractScriptFieldQuery.java | 31 +++++--- ...stractStringScriptFieldAutomatonQuery.java | 26 ++++++- .../mapper/BooleanScriptFieldTypeTests.java | 41 +++++----- .../mapper/DateScriptFieldTypeTests.java | 37 ++++----- .../mapper/DoubleScriptFieldTypeTests.java | 37 ++++----- .../mapper/GeoPointScriptFieldTypeTests.java | 17 +++-- .../index/mapper/IpScriptFieldTypeTests.java | 43 +++++------ .../mapper/KeywordScriptFieldTypeTests.java | 75 ++++++++++--------- .../mapper/LongScriptFieldTypeTests.java | 41 +++++----- .../StringScriptFieldFuzzyQueryTests.java | 22 +++--- .../StringScriptFieldRegexpQueryTests.java | 21 +++--- .../StringScriptFieldWildcardQueryTests.java | 22 +++--- .../AbstractScriptFieldTypeTestCase.java | 13 ++++ .../AbstractGeoShapeScriptFieldQuery.java | 2 +- .../mapper/GeoShapeScriptFieldTypeTests.java | 15 ++-- 21 files changed, 260 insertions(+), 199 deletions(-) create mode 100644 docs/changelog/106678.yaml diff --git a/docs/changelog/106678.yaml b/docs/changelog/106678.yaml new file mode 100644 index 0000000000000..20bf12d6d4346 --- /dev/null +++ b/docs/changelog/106678.yaml @@ -0,0 +1,6 @@ +pr: 106678 +summary: Fix concurrency bug in `AbstractStringScriptFieldAutomatonQuery` +area: Search +type: bug +issues: + - 105911 diff --git 
a/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java index 38363ee3e3fdd..c6ddd1964188f 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractBooleanScriptFieldQuery.java @@ -23,7 +23,7 @@ abstract class AbstractBooleanScriptFieldQuery extends AbstractScriptFieldQuery< } @Override - protected boolean matches(BooleanFieldScript scriptContext, int docId) { + protected final boolean matches(BooleanFieldScript scriptContext, int docId) { scriptContext.runForDoc(docId); return matches(scriptContext.trues(), scriptContext.falses()); } diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java index 500d00628bd19..722cff6fc0edf 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractDoubleScriptFieldQuery.java @@ -22,7 +22,7 @@ abstract class AbstractDoubleScriptFieldQuery extends AbstractScriptFieldQuery values) { + protected TwoPhaseIterator createTwoPhaseIterator(StringFieldScript scriptContext, DocIdSetIterator approximation) { + BytesRefBuilder scratch = new BytesRefBuilder(); + return new TwoPhaseIterator(approximation) { + @Override + public boolean matches() { + scriptContext.runForDoc(approximation.docID()); + return AbstractStringScriptFieldAutomatonQuery.this.matches(scriptContext.getValues(), scratch); + } + + @Override + public float matchCost() { + return MATCH_COST; + } + }; + } + + protected final boolean matches(List values, BytesRefBuilder scratch) { for (String value : values) { scratch.copyChars(value); if (automaton.run(scratch.bytes(), 0, scratch.length())) 
{ @@ -41,6 +58,11 @@ protected final boolean matches(List values) { return false; } + @Override + protected final boolean matches(List values) { + throw new UnsupportedOperationException(); + } + @Override public final void visit(QueryVisitor visitor) { if (visitor.acceptField(fieldName())) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index d55eaf9df3452..0cdc9568f1fac 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -55,6 +55,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -73,8 +74,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -104,7 +105,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(1L, 0L, 1L))); + assertThat(results, containsInAnyOrder(1L, 0L, 1L)); } } } @@ -112,8 +113,8 @@ public void collect(int doc) throws IOException { @Override public void testSort() 
throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -128,8 +129,8 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); { @@ -185,10 +186,10 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); - iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3)); @@ -199,7 +200,7 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -210,7 +211,7 @@ public void testRangeQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -221,8 +222,8 @@ public void testRangeQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); - iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -269,7 +270,7 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1)); @@ -282,7 +283,7 @@ public void testTermQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1)); @@ -305,7 +306,7 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(1)); @@ -315,7 +316,7 @@ public void testTermsQuery() throws IOException { } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(1)); @@ -364,7 +365,7 @@ public XContentParser parser() { while (ctx.parser().nextToken() != Token.END_ARRAY) { ootb.parse(ctx); } - iw.addDocument(ctx.doc()); + addDocument(iw, ctx.doc()); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertSameCount( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java index 25a79022c245e..09d4b62fb157c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java @@ -60,6 +60,7 @@ import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -160,8 +161,8 @@ public void testFormatDuel() throws IOException { @Override public void testDocValues() throws IOException { try (Directory 
directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1595432181351]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -191,7 +192,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(1595518581354L, 1595518581351L, 1595518581356L))); + assertThat(results, containsInAnyOrder(1595518581354L, 1595518581351L, 1595518581356L)); } } } @@ -199,9 +200,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); DateScriptFieldData ifd = 
simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -220,9 +221,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -300,8 +301,8 @@ private Query randomDistanceFeatureQuery(MappedFieldType ft, SearchExecutionCont @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -312,9 
+313,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -394,8 +395,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery("2020-07-22T15:36:21.354Z", mockContext())), equalTo(1)); @@ -422,8 +423,8 @@ protected Query randomTermQuery(MappedFieldType ft, 
SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); try (DirectoryReader reader = iw.getReader()) { MappedFieldType ft = simpleMappedFieldType(); IndexSearcher searcher = newSearcher(reader); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index ed365a2460203..9547b4f9cb9a3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -45,6 +45,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { @@ -71,8 +72,8 @@ public void testFormat() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.0]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [3.14, 1.4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.0]}")))); + addDocument(iw, List.of(new 
StoredField("_source", new BytesRef("{\"foo\": [3.14, 1.4]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -102,7 +103,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(2.0, 2.4, 4.140000000000001))); + assertThat(results, containsInAnyOrder(2.0, 2.4, 4.140000000000001)); } } } @@ -110,9 +111,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -128,9 +129,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1.1]}")))); + 
addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4.2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -158,8 +159,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -170,9 +171,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.5]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = 
newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -195,8 +196,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); @@ -218,8 +219,8 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2.1]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java index 36f691341425c..3289e46941a45 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptFieldTypeTests.java @@ -44,6 +44,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class GeoPointScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { @@ -71,8 +72,8 @@ protected boolean supportsRangeQueries() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -103,7 +104,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(new GeoPoint(45.0, 45.0), new GeoPoint(0.0, 0.0)))); + assertThat(results, containsInAnyOrder(new GeoPoint(45.0, 45.0), new GeoPoint(0.0, 0.0))); } } } @@ -117,7 +118,7 @@ public void testSort() throws IOException { public void testFetch() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef(""" + addDocument(iw, List.of(new StoredField("_source", new BytesRef(""" {"foo": {"lat": 45.0, "lon" : 45.0}}""")))); try (DirectoryReader reader = iw.getReader()) { 
SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -138,8 +139,8 @@ public void testFetch() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -167,8 +168,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 45.0, \"lon\" : 45.0}}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": {\"lat\": 0.0, \"lon\" : 0.0}}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(2)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 5eb66e631d86f..4726424ada5f2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -49,6 +49,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; @@ -75,8 +76,8 @@ public void testFormat() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.2\", \"192.168.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.2\", \"192.168.1\"]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -107,7 +108,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of("192.168.0.1", "192.168.1.1", "192.168.2.1"))); + assertThat(results, containsInAnyOrder("192.168.0.1", "192.168.1.1", "192.168.2.1")); } } } @@ -115,9 +116,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[\"192.168.0.4\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -142,9 +143,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.4\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.2\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -172,8 +173,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -184,9 +185,9 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -207,9 +208,9 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}")))); 
+ addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); IpScriptFieldType fieldType = build("append_param", Map.of("param", ".1"), OnScriptError.FAIL); @@ -229,10 +230,10 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"200.0.0.1\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"1.1.1.1\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index d8903251e6c3b..6912194625bb7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -49,6 +49,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase { @@ -66,8 +67,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -97,7 +98,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of("1-suffix", "1-suffix", "2-suffix"))); + assertThat(results, containsInAnyOrder("1-suffix", "1-suffix", "2-suffix")); } } } @@ -105,9 +106,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"d\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[\"d\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -123,9 +124,9 @@ public void testSort() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aaa\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"a\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aaa\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aa\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -153,8 +154,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); 
assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -164,11 +165,11 @@ public void testExistsQuery() throws IOException { public void testFuzzyQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); // No edits, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caat\"]}")))); // Single insertion, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cta\"]}")))); // Single transposition, matches - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caaat\"]}")))); // Two insertions, no match - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); // Totally wrong, no match + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); // No edits, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caat\"]}")))); // Single insertion, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cta\"]}")))); // Single transposition, matches + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"caaat\"]}")))); // Two insertions, no match + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); // Totally wrong, no match try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -200,9 +201,9 @@ private Query randomFuzzyQuery(MappedFieldType ft, SearchExecutionContext ctx) { public void testPrefixQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": 
[\"cat\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().prefixQuery("cat", null, mockContext())), equalTo(2)); @@ -225,9 +226,9 @@ private Query randomPrefixQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -268,9 +269,9 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testRegexpQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); - 
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cat\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"cata\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"dog\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat( @@ -294,8 +295,8 @@ private Query randomRegexpQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-suffix"), OnScriptError.FAIL); @@ -312,10 +313,10 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [3]}")))); - iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [3]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1", "2"), mockContext())), equalTo(2)); @@ -330,8 +331,8 @@ protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) public void testWildcardQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().wildcardQuery("a*b", null, mockContext())), equalTo(1)); @@ -342,8 +343,8 @@ public void testWildcardQuery() throws IOException { // Normalized WildcardQueries are requested by the QueryStringQueryParser public void testNormalizedWildcardQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"aab\"]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [\"aab\"]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"b\"]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().normalizedWildcardQuery("a*b", null, mockContext())), equalTo(1)); @@ -365,8 +366,8 @@ private Query randomWildcardQuery(MappedFieldType ft, SearchExecutionContext ctx public void testMatchQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-Suffix"), OnScriptError.FAIL); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index debcd3c5fa911..83b3dbe858471 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -47,6 +47,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -83,8 +84,8 @@ public void testLongFromSource() throws IOException { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new 
RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2, 1]}")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -114,7 +115,7 @@ public void collect(int doc) throws IOException { }; } }); - assertThat(results, equalTo(List.of(2L, 2L, 3L))); + assertThat(results, containsInAnyOrder(2L, 2L, 3L)); } } } @@ -122,9 +123,9 @@ public void collect(int doc) throws IOException { @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); @@ -139,9 +140,9 @@ public void testSort() throws IOException { public void testNow() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": 
[1595432181354]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); LongScriptFieldData ifd = build("millis_ago", Map.of(), OnScriptError.FAIL).fielddataBuilder(mockFielddataContext()) @@ -164,9 +165,9 @@ public void testNow() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [4]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -194,8 +195,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new 
StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(1)); @@ -206,8 +207,8 @@ public void testExistsQuery() throws IOException { @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); @@ -228,8 +229,8 @@ protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); 
assertThat(searcher.count(simpleMappedFieldType().termQuery("1", mockContext())), equalTo(1)); @@ -251,8 +252,8 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("1"), mockContext())), equalTo(1)); diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java index 86486cac893cf..3ded47b6d2671 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldFuzzyQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; @@ -68,18 +69,19 @@ protected StringScriptFieldFuzzyQuery mutate(StringScriptFieldFuzzyQuery orig) { @Override public void testMatches() { StringScriptFieldFuzzyQuery query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 1, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertTrue(query.matches(List.of("foa"))); - assertTrue(query.matches(List.of("foo", 
"bar"))); - assertFalse(query.matches(List.of("bar"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("foo"), scratch)); + assertTrue(query.matches(List.of("foa"), scratch)); + assertTrue(query.matches(List.of("foo", "bar"), scratch)); + assertFalse(query.matches(List.of("bar"), scratch)); query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 0, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertFalse(query.matches(List.of("foa"))); + assertTrue(query.matches(List.of("foo"), scratch)); + assertFalse(query.matches(List.of("foa"), scratch)); query = StringScriptFieldFuzzyQuery.build(randomScript(), leafFactory, "test", "foo", 2, 0, false); - assertTrue(query.matches(List.of("foo"))); - assertTrue(query.matches(List.of("foa"))); - assertTrue(query.matches(List.of("faa"))); - assertFalse(query.matches(List.of("faaa"))); + assertTrue(query.matches(List.of("foo"), scratch)); + assertTrue(query.matches(List.of("foa"), scratch)); + assertTrue(query.matches(List.of("faa"), scratch)); + assertFalse(query.matches(List.of("faaa"), scratch)); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java index 50c6786de1282..46f841c344e5f 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; @@ -84,13 +85,14 @@ public void testMatches() { 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); - 
assertTrue(query.matches(List.of("astuffb"))); - assertFalse(query.matches(List.of("astuffB"))); - assertFalse(query.matches(List.of("fffff"))); - assertFalse(query.matches(List.of("ab"))); - assertFalse(query.matches(List.of("aasdf"))); - assertFalse(query.matches(List.of("dsfb"))); - assertTrue(query.matches(List.of("astuffb", "fffff"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("astuffb"), scratch)); + assertFalse(query.matches(List.of("astuffB"), scratch)); + assertFalse(query.matches(List.of("fffff"), scratch)); + assertFalse(query.matches(List.of("ab"), scratch)); + assertFalse(query.matches(List.of("aasdf"), scratch)); + assertFalse(query.matches(List.of("dsfb"), scratch)); + assertTrue(query.matches(List.of("astuffb", "fffff"), scratch)); StringScriptFieldRegexpQuery ciQuery = new StringScriptFieldRegexpQuery( randomScript(), @@ -101,9 +103,8 @@ public void testMatches() { RegExp.ASCII_CASE_INSENSITIVE, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ); - assertTrue(ciQuery.matches(List.of("astuffB"))); - assertTrue(ciQuery.matches(List.of("Astuffb", "fffff"))); - + assertTrue(ciQuery.matches(List.of("astuffB"), scratch)); + assertTrue(ciQuery.matches(List.of("Astuffb", "fffff"), scratch)); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java index 37e24553f9fce..f6cd59f4254ad 100644 --- a/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQueryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; @@ -52,18 +53,19 @@ protected 
StringScriptFieldWildcardQuery mutate(StringScriptFieldWildcardQuery o @Override public void testMatches() { StringScriptFieldWildcardQuery query = new StringScriptFieldWildcardQuery(randomScript(), leafFactory, "test", "a*b", false); - assertTrue(query.matches(List.of("astuffb"))); - assertFalse(query.matches(List.of("Astuffb"))); - assertFalse(query.matches(List.of("fffff"))); - assertFalse(query.matches(List.of("a"))); - assertFalse(query.matches(List.of("b"))); - assertFalse(query.matches(List.of("aasdf"))); - assertFalse(query.matches(List.of("dsfb"))); - assertTrue(query.matches(List.of("astuffb", "fffff"))); + BytesRefBuilder scratch = new BytesRefBuilder(); + assertTrue(query.matches(List.of("astuffb"), scratch)); + assertFalse(query.matches(List.of("Astuffb"), scratch)); + assertFalse(query.matches(List.of("fffff"), scratch)); + assertFalse(query.matches(List.of("a"), scratch)); + assertFalse(query.matches(List.of("b"), scratch)); + assertFalse(query.matches(List.of("aasdf"), scratch)); + assertFalse(query.matches(List.of("dsfb"), scratch)); + assertTrue(query.matches(List.of("astuffb", "fffff"), scratch)); StringScriptFieldWildcardQuery ciQuery = new StringScriptFieldWildcardQuery(randomScript(), leafFactory, "test", "a*b", true); - assertTrue(ciQuery.matches(List.of("Astuffb"))); - assertTrue(ciQuery.matches(List.of("astuffB", "fffff"))); + assertTrue(ciQuery.matches(List.of("Astuffb"), scratch)); + assertTrue(ciQuery.matches(List.of("astuffB", "fffff"), scratch)); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index ea97bafc5e4c8..675b5959f35a3 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -13,6 +13,7 @@ import 
org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -64,6 +65,18 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC protected abstract String typeName(); + /** + * Add the provided document to the provided writer, and randomly flush. + * This is useful for situations where there are not enough documents indexed to trigger random flush and commit performed + * by {@link RandomIndexWriter}. Flushing is important to obtain multiple slices and inter-segment concurrency. + */ + protected static void addDocument(RandomIndexWriter iw, Iterable indexableFields) throws IOException { + iw.addDocument(indexableFields); + if (randomBoolean()) { + iw.flush(); + } + } + public final void testMinimalSerializesToItself() throws IOException { XContentBuilder orig = JsonXContent.contentBuilder().startObject(); createMapperService(runtimeFieldMapping(this::minimalMapping)).documentMapper().mapping().toXContent(orig, ToXContent.EMPTY_PARAMS); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java index 18020bd44ca6e..c178b20530f0c 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/AbstractGeoShapeScriptFieldQuery.java @@ -22,7 +22,7 @@ abstract class AbstractGeoShapeScriptFieldQuery extends AbstractScriptFieldQuery } @Override - protected boolean matches(GeometryFieldScript scriptContext, int docId) { + protected final boolean 
matches(GeometryFieldScript scriptContext, int docId) { scriptContext.runForDoc(docId); return matches(scriptContext.geometry()); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java index 331bfbf8cd305..592cb65800b71 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptFieldTypeTests.java @@ -99,8 +99,8 @@ protected ScriptFactory dummyScript() { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); List results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); @@ -128,6 +128,7 @@ public void collect(int doc) throws IOException { }; } }); + assertEquals(2, results.size()); } } } @@ -141,7 +142,7 @@ public void testSort() throws IOException { public void testFetch() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef(""" + addDocument(iw, List.of(new StoredField("_source", new BytesRef(""" {"foo": {"coordinates": 
[[45.0, 45.0], [0.0, 0.0]], "type" : "LineString"}}""")))); try (DirectoryReader reader = iw.getReader()) { SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -162,8 +163,8 @@ public void testFetch() throws IOException { @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); @@ -196,8 +197,8 @@ public double execute(ExplanationHolder explanation) { @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); - iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(0.0 0.0, 1.0 1.0)\" }")))); + addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": \"LINESTRING(45.0 45.0, 3.0 3.0)\" }")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(2)); From 
f51064aebdef0c66b4230b8306d1ad0de9d39c0d Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 26 Mar 2024 08:39:17 -0700 Subject: [PATCH 178/214] Set index mode earlier for new downsample index (#106728) * Set index mode earlier for new downsample index Downsample index is created using temporary index service using a static predefined subset of index settings. All other settings are later copied over from source index. As discovered in #106338 this causes context like index mode to be missing during initial index creation process. This PR adds index mode and related required settings to initial set of index settings in order to have access to this information during initial create index operation. --- .../xpack/downsample/TransportDownsampleAction.java | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 0570d93441be1..58401451fa86b 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -805,7 +805,17 @@ private void createDownsampleIndex( .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, String.valueOf(numberOfReplicas)) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1") .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED) - .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval); + .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval) + .put(IndexSettings.MODE.getKey(), sourceIndexMetadata.getIndexMode()) + .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), sourceIndexMetadata.getRoutingPaths()) + .put( + IndexSettings.TIME_SERIES_START_TIME.getKey(), + 
sourceIndexMetadata.getSettings().get(IndexSettings.TIME_SERIES_START_TIME.getKey()) + ) + .put( + IndexSettings.TIME_SERIES_END_TIME.getKey(), + sourceIndexMetadata.getSettings().get(IndexSettings.TIME_SERIES_END_TIME.getKey()) + ); if (sourceIndexMetadata.getSettings().hasValue(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey())) { builder.put( MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), From a279a302bfe9289e522a4bb32499af1304a70c0b Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 26 Mar 2024 16:51:43 +0100 Subject: [PATCH 179/214] Move more XContent parsing to test codebase (#106704) Follow up to #105801 moving more parsers that are test-only over to the test codebase. --- .../action/DocWriteResponse.java | 68 ++------------ .../snapshots/status/SnapshotIndexStatus.java | 48 ---------- .../snapshots/status/SnapshotShardsStats.java | 32 ------- .../snapshots/status/SnapshotStatus.java | 77 ++-------------- .../broadcast/BaseBroadcastResponse.java | 28 ------ .../cluster/health/ClusterIndexHealth.java | 90 ++----------------- .../cluster/health/ClusterShardHealth.java | 65 ++------------ .../script/ScriptLanguagesInfo.java | 40 +-------- .../profile/SearchProfileDfsPhaseResult.java | 24 +---- .../query/QueryProfileShardResult.java | 41 --------- .../health/ClusterHealthResponsesTests.java | 2 +- .../status/SnapshotIndexStatusTests.java | 49 +++++++++- .../status/SnapshotShardsStatsTests.java | 29 +++++- .../snapshots/status/SnapshotStatusTests.java | 61 ++++++++++++- .../status/SnapshotsStatusResponseTests.java | 2 +- .../GetScriptLanguageResponseTests.java | 29 +++++- .../analyze/ReloadAnalyzersResponseTests.java | 1 - .../query/ValidateQueryResponseTests.java | 1 - .../action/bulk/BulkItemResponseTests.java | 55 +++++++++++- .../action/delete/DeleteResponseTests.java | 4 +- .../action/index/IndexResponseTests.java | 3 +- .../action/update/UpdateResponseTests.java | 3 +- .../health/ClusterIndexHealthTests.java | 72 
++++++++++++++- .../health/ClusterShardHealthTests.java | 46 +++++++++- .../SearchProfileDfsPhaseResultTests.java | 3 +- .../query/QueryProfileShardResultTests.java | 3 +- .../search/SearchResponseUtils.java | 62 ++++++++++++- .../AbstractBroadcastResponseTestCase.java | 29 ++++++ .../test/rest/ESRestTestCase.java | 3 +- .../termsenum/action/TermsEnumResponse.java | 32 ------- .../termsenum/TermsEnumResponseTests.java | 31 ++++++- 31 files changed, 502 insertions(+), 531 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index fdef41acb16da..685fc032431c3 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -12,13 +12,11 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.support.WriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; -import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -26,7 +24,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.net.URLEncoder; @@ -34,7 +31,6 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static 
org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -43,14 +39,14 @@ */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, ToXContentObject { - private static final String _SHARDS = "_shards"; - private static final String _INDEX = "_index"; - private static final String _ID = "_id"; - private static final String _VERSION = "_version"; - private static final String _SEQ_NO = "_seq_no"; - private static final String _PRIMARY_TERM = "_primary_term"; - private static final String RESULT = "result"; - private static final String FORCED_REFRESH = "forced_refresh"; + public static final String _SHARDS = "_shards"; + public static final String _INDEX = "_index"; + public static final String _ID = "_id"; + public static final String _VERSION = "_version"; + public static final String _SEQ_NO = "_seq_no"; + public static final String _PRIMARY_TERM = "_primary_term"; + public static final String RESULT = "result"; + public static final String FORCED_REFRESH = "forced_refresh"; /** * An enum that represents the results of CRUD operations, primarily used to communicate the type of @@ -302,54 +298,6 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t return builder; } - /** - * Parse the output of the {@link #innerToXContent(XContentBuilder, Params)} method. - * - * This method is intended to be called by subclasses and must be called multiple times to parse all the information concerning - * {@link DocWriteResponse} objects. It always parses the current token, updates the given parsing context accordingly - * if needed and then immediately returns. 
- */ - public static void parseInnerToXContent(XContentParser parser, Builder context) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - if (token.isValue()) { - if (_INDEX.equals(currentFieldName)) { - // index uuid and shard id are unknown and can't be parsed back for now. - context.setShardId(new ShardId(new Index(parser.text(), IndexMetadata.INDEX_UUID_NA_VALUE), -1)); - } else if (_ID.equals(currentFieldName)) { - context.setId(parser.text()); - } else if (_VERSION.equals(currentFieldName)) { - context.setVersion(parser.longValue()); - } else if (RESULT.equals(currentFieldName)) { - String result = parser.text(); - for (Result r : Result.values()) { - if (r.getLowercase().equals(result)) { - context.setResult(r); - break; - } - } - } else if (FORCED_REFRESH.equals(currentFieldName)) { - context.setForcedRefresh(parser.booleanValue()); - } else if (_SEQ_NO.equals(currentFieldName)) { - context.setSeqNo(parser.longValue()); - } else if (_PRIMARY_TERM.equals(currentFieldName)) { - context.setPrimaryTerm(parser.longValue()); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (_SHARDS.equals(currentFieldName)) { - context.setShardInfo(ShardInfo.fromXContent(parser)); - } else { - parser.skipChildren(); // skip potential inner objects for forward compatibility - } - } else if (token == XContentParser.Token.START_ARRAY) { - parser.skipChildren(); // skip potential inner arrays for forward compatibility - } - } - /** * Base class of all {@link DocWriteResponse} builders. 
These {@link DocWriteResponse.Builder} are used during * xcontent parsing to temporarily store the parsed values, then the {@link Builder#build()} method is called to diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java index 5d66baf0216ad..4a98ff62f6293 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java @@ -8,26 +8,17 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; -import java.util.List; import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * Represents snapshot status of all shards in the index @@ -118,45 +109,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - static final ObjectParser.NamedObjectParser PARSER; - static { - ConstructingObjectParser innerParser = new ConstructingObjectParser<>( - "snapshot_index_status", - true, - (Object[] parsedObjects, String index) -> { - int i = 0; - SnapshotShardsStats shardsStats = ((SnapshotShardsStats) 
parsedObjects[i++]); - SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); - @SuppressWarnings("unchecked") - List shardStatuses = (List) parsedObjects[i]; - - final Map indexShards; - if (shardStatuses == null || shardStatuses.isEmpty()) { - indexShards = emptyMap(); - } else { - indexShards = Maps.newMapWithExpectedSize(shardStatuses.size()); - for (SnapshotIndexShardStatus shardStatus : shardStatuses) { - indexShards.put(shardStatus.getShardId().getId(), shardStatus); - } - } - return new SnapshotIndexStatus(index, indexShards, shardsStats, stats); - } - ); - innerParser.declareObject( - constructorArg(), - (p, c) -> SnapshotShardsStats.PARSER.apply(p, null), - new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS) - ); - innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS)); - innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS)); - PARSER = ((p, c, name) -> innerParser.apply(p, name)); - } - - public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - return PARSER.parse(parser, null, parser.currentName()); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java index 5bbc5368505db..28806b0aca87e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java @@ -8,18 +8,13 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import 
org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * Status of a snapshot shards */ @@ -129,33 +124,6 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - Fields.SHARDS_STATS, - true, - (Object[] parsedObjects) -> { - int i = 0; - int initializingShards = (int) parsedObjects[i++]; - int startedShards = (int) parsedObjects[i++]; - int finalizingShards = (int) parsedObjects[i++]; - int doneShards = (int) parsedObjects[i++]; - int failedShards = (int) parsedObjects[i++]; - int totalShards = (int) parsedObjects[i]; - return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); - } - ); - static { - PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED)); - PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL)); - } - - public static SnapshotShardsStats fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index 956ce57d168e0..e228ad18641fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress.State; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; @@ -19,12 +18,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.snapshots.Snapshot; -import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -34,11 +28,7 @@ import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Status of a snapshot @@ -87,7 +77,7 @@ public class SnapshotStatus implements ChunkedToXContentObject, Writeable { updateShardStats(startTime, time); } - private SnapshotStatus( + SnapshotStatus( Snapshot snapshot, State state, List shards, @@ -182,12 +172,12 @@ public SnapshotStats getStats() { return stats; } - private static final String SNAPSHOT = "snapshot"; - private static final String REPOSITORY = "repository"; - private static 
final String UUID = "uuid"; - private static final String STATE = "state"; - private static final String INDICES = "indices"; - private static final String INCLUDE_GLOBAL_STATE = "include_global_state"; + static final String SNAPSHOT = "snapshot"; + static final String REPOSITORY = "repository"; + static final String UUID = "uuid"; + static final String STATE = "state"; + static final String INDICES = "indices"; + static final String INCLUDE_GLOBAL_STATE = "include_global_state"; @Override public Iterator toXContentChunked(ToXContent.Params params) { @@ -206,59 +196,6 @@ public Iterator toXContentChunked(ToXContent.Params params }), getIndices().values().iterator(), Iterators.single((b, p) -> b.endObject().endObject())); } - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshot_status", - true, - (Object[] parsedObjects) -> { - int i = 0; - String name = (String) parsedObjects[i++]; - String repository = (String) parsedObjects[i++]; - String uuid = (String) parsedObjects[i++]; - String rawState = (String) parsedObjects[i++]; - Boolean includeGlobalState = (Boolean) parsedObjects[i++]; - SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); - SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); - @SuppressWarnings("unchecked") - List indices = ((List) parsedObjects[i]); - - Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid)); - SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState); - Map indicesStatus; - List shards; - if (indices == null || indices.isEmpty()) { - indicesStatus = emptyMap(); - shards = emptyList(); - } else { - indicesStatus = Maps.newMapWithExpectedSize(indices.size()); - shards = new ArrayList<>(); - for (SnapshotIndexStatus index : indices) { - indicesStatus.put(index.getIndex(), index); - shards.addAll(index.getShards().values()); - } - } - return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, 
includeGlobalState); - } - ); - static { - PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT)); - PARSER.declareString(constructorArg(), new ParseField(REPOSITORY)); - PARSER.declareString(constructorArg(), new ParseField(UUID)); - PARSER.declareString(constructorArg(), new ParseField(STATE)); - PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE)); - PARSER.declareField( - constructorArg(), - SnapshotStats::fromXContent, - new ParseField(SnapshotStats.Fields.STATS), - ObjectParser.ValueType.OBJECT - ); - PARSER.declareObject(constructorArg(), SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); - PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES)); - } - - public static SnapshotStatus fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - private void updateShardStats(long startTime, long time) { stats = new SnapshotStats(startTime, time, 0, 0, 0, 0, 0, 0); shardsStats = new SnapshotShardsStats(shards); diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java index b69b87190f2a7..3a27d6ac58534 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java @@ -13,15 +13,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.util.List; import static org.elasticsearch.action.support.DefaultShardOperationFailedException.readShardOperationFailed; -import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Base class for all broadcast operation based responses. @@ -30,35 +26,11 @@ public class BaseBroadcastResponse extends ActionResponse { public static final DefaultShardOperationFailedException[] EMPTY = new DefaultShardOperationFailedException[0]; - private static final ParseField _SHARDS_FIELD = new ParseField("_shards"); - private static final ParseField TOTAL_FIELD = new ParseField("total"); - private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful"); - private static final ParseField FAILED_FIELD = new ParseField("failed"); - private static final ParseField FAILURES_FIELD = new ParseField("failures"); - private final int totalShards; private final int successfulShards; private final int failedShards; private final DefaultShardOperationFailedException[] shardFailures; - @SuppressWarnings("unchecked") - public static void declareBroadcastFields(ConstructingObjectParser PARSER) { - ConstructingObjectParser shardsParser = new ConstructingObjectParser<>( - "_shards", - true, - arg -> new BaseBroadcastResponse((int) arg[0], (int) arg[1], (int) arg[2], (List) arg[3]) - ); - shardsParser.declareInt(constructorArg(), TOTAL_FIELD); - shardsParser.declareInt(constructorArg(), SUCCESSFUL_FIELD); - shardsParser.declareInt(constructorArg(), FAILED_FIELD); - shardsParser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> DefaultShardOperationFailedException.fromXContent(p), - FAILURES_FIELD - ); - PARSER.declareObject(constructorArg(), shardsParser, _SHARDS_FIELD); - } - public BaseBroadcastResponse(StreamInput in) throws IOException { totalShards = in.readVInt(); successfulShards = in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java index 
f236a9eff25a2..ad957f7a8f37f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterIndexHealth.java @@ -15,93 +15,25 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; -import static java.util.Collections.emptyMap; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public final class ClusterIndexHealth implements Writeable, ToXContentFragment { - private static final String STATUS = "status"; - private static final String NUMBER_OF_SHARDS = "number_of_shards"; - private static final String NUMBER_OF_REPLICAS = "number_of_replicas"; - private static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; - private static final String ACTIVE_SHARDS = "active_shards"; - private static final String RELOCATING_SHARDS = "relocating_shards"; - private static final String INITIALIZING_SHARDS = "initializing_shards"; - private static final String UNASSIGNED_SHARDS = "unassigned_shards"; - private static final String SHARDS = "shards"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_index_health", - true, - (parsedObjects, index) -> { - int i = 0; - int numberOfShards = (int) 
parsedObjects[i++]; - int numberOfReplicas = (int) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - int activePrimaryShards = (int) parsedObjects[i++]; - String statusStr = (String) parsedObjects[i++]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - @SuppressWarnings("unchecked") - List shardList = (List) parsedObjects[i]; - final Map shards; - if (shardList == null || shardList.isEmpty()) { - shards = emptyMap(); - } else { - shards = Maps.newMapWithExpectedSize(shardList.size()); - for (ClusterShardHealth shardHealth : shardList) { - shards.put(shardHealth.getShardId(), shardHealth); - } - } - return new ClusterIndexHealth( - index, - numberOfShards, - numberOfReplicas, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - activePrimaryShards, - status, - shards - ); - } - ); - - public static final ObjectParser.NamedObjectParser SHARD_PARSER = ( - XContentParser p, - String indexIgnored, - String shardId) -> ClusterShardHealth.innerFromXContent(p, Integer.valueOf(shardId)); - - static { - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_REPLICAS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_PRIMARY_SHARDS)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - // Can be absent if LEVEL == 'indices' or 'cluster' - PARSER.declareNamedObjects(optionalConstructorArg(), SHARD_PARSER, new ParseField(SHARDS)); - } + static final String 
STATUS = "status"; + static final String NUMBER_OF_SHARDS = "number_of_shards"; + static final String NUMBER_OF_REPLICAS = "number_of_replicas"; + static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String SHARDS = "shards"; private final String index; private final int numberOfShards; @@ -279,10 +211,6 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa return builder; } - public static ClusterIndexHealth innerFromXContent(XContentParser parser, String index) { - return PARSER.apply(parser, index); - } - @Override public String toString() { return "ClusterIndexHealth{" diff --git a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java index b3aa4275f7be7..785b0db5cc807 100644 --- a/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java +++ b/server/src/main/java/org/elasticsearch/cluster/health/ClusterShardHealth.java @@ -17,59 +17,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public final class 
ClusterShardHealth implements Writeable, ToXContentFragment { - private static final String STATUS = "status"; - private static final String ACTIVE_SHARDS = "active_shards"; - private static final String RELOCATING_SHARDS = "relocating_shards"; - private static final String INITIALIZING_SHARDS = "initializing_shards"; - private static final String UNASSIGNED_SHARDS = "unassigned_shards"; - private static final String PRIMARY_ACTIVE = "primary_active"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "cluster_shard_health", - true, - (parsedObjects, shardId) -> { - int i = 0; - boolean primaryActive = (boolean) parsedObjects[i++]; - int activeShards = (int) parsedObjects[i++]; - int relocatingShards = (int) parsedObjects[i++]; - int initializingShards = (int) parsedObjects[i++]; - int unassignedShards = (int) parsedObjects[i++]; - String statusStr = (String) parsedObjects[i]; - ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); - return new ClusterShardHealth( - shardId, - status, - activeShards, - relocatingShards, - initializingShards, - unassignedShards, - primaryActive - ); - } - ); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField(PRIMARY_ACTIVE)); - PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS)); - PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS)); - PARSER.declareString(constructorArg(), new ParseField(STATUS)); - } + static final String STATUS = "status"; + static final String ACTIVE_SHARDS = "active_shards"; + static final String RELOCATING_SHARDS = "relocating_shards"; + static final String INITIALIZING_SHARDS = "initializing_shards"; + static final String UNASSIGNED_SHARDS = "unassigned_shards"; + static final String PRIMARY_ACTIVE = "primary_active"; private final int shardId; private 
final ClusterHealthStatus status; @@ -230,20 +191,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - static ClusterShardHealth innerFromXContent(XContentParser parser, Integer shardId) { - return PARSER.apply(parser, shardId); - } - - public static ClusterShardHealth fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String shardIdStr = parser.currentName(); - ClusterShardHealth parsed = innerFromXContent(parser, Integer.valueOf(shardIdStr)); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return parsed; - } - @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java b/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java index 7b3ea4fbe4581..b64383c562c50 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptLanguagesInfo.java @@ -11,23 +11,16 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; - -import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; /** * The allowable types, languages and their corresponding contexts. When serialized there is a top level types_allowed list, @@ -68,10 +61,10 @@ * */ public class ScriptLanguagesInfo implements ToXContentObject, Writeable { - private static final ParseField TYPES_ALLOWED = new ParseField("types_allowed"); - private static final ParseField LANGUAGE_CONTEXTS = new ParseField("language_contexts"); - private static final ParseField LANGUAGE = new ParseField("language"); - private static final ParseField CONTEXTS = new ParseField("contexts"); + public static final ParseField TYPES_ALLOWED = new ParseField("types_allowed"); + public static final ParseField LANGUAGE_CONTEXTS = new ParseField("language_contexts"); + public static final ParseField LANGUAGE = new ParseField("language"); + public static final ParseField CONTEXTS = new ParseField("contexts"); public final Set typesAllowed; public final Map> languageContexts; @@ -86,31 +79,6 @@ public ScriptLanguagesInfo(StreamInput in) throws IOException { languageContexts = in.readImmutableMap(sin -> sin.readCollectionAsImmutableSet(StreamInput::readString)); } - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "script_languages_info", - true, - (a) -> new ScriptLanguagesInfo( - new HashSet<>((List) a[0]), - ((List>>) a[1]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) - ) - ); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser>, Void> LANGUAGE_CONTEXT_PARSER = - new ConstructingObjectParser<>("language_contexts", true, (m, name) -> new Tuple<>((String) m[0], Set.copyOf((List) m[1]))); - - static { - PARSER.declareStringArray(constructorArg(), TYPES_ALLOWED); - PARSER.declareObjectArray(constructorArg(), LANGUAGE_CONTEXT_PARSER, LANGUAGE_CONTEXTS); - LANGUAGE_CONTEXT_PARSER.declareString(constructorArg(), LANGUAGE); - 
LANGUAGE_CONTEXT_PARSER.declareStringArray(constructorArg(), CONTEXTS); - } - - public static ScriptLanguagesInfo fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(typesAllowed); diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java index 5f8e6a893c1b5..e83fa79c79460 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java @@ -15,20 +15,16 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; -import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - public class SearchProfileDfsPhaseResult implements Writeable, ToXContentObject { private final ProfileResult dfsShardResult; @@ -63,24 +59,8 @@ public void writeTo(StreamOutput out) throws IOException { } } - private static final ParseField STATISTICS = new ParseField("statistics"); - private static final ParseField KNN = new ParseField("knn"); - private static final InstantiatingObjectParser PARSER; - - static { - InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( - "search_profile_dfs_phase_result", - true, - 
SearchProfileDfsPhaseResult.class - ); - parser.declareObject(optionalConstructorArg(), (p, c) -> ProfileResult.fromXContent(p), STATISTICS); - parser.declareObjectArray(optionalConstructorArg(), (p, c) -> QueryProfileShardResult.fromXContent(p), KNN); - PARSER = parser.build(); - } - - public static SearchProfileDfsPhaseResult fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } + public static final ParseField STATISTICS = new ParseField("statistics"); + public static final ParseField KNN = new ParseField("knn"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index e72ef2d9b3ece..8aebde23d6a87 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -25,8 +24,6 @@ import java.util.List; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * A container class to hold the profile results for a single shard in the request. * Contains a list of query profiles, a collector tree and a total rewrite tree. 
@@ -139,42 +136,4 @@ public int hashCode() { public String toString() { return Strings.toString(this); } - - public static QueryProfileShardResult fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); - String currentFieldName = null; - List queryProfileResults = new ArrayList<>(); - long rewriteTime = 0; - Long vectorOperationsCount = null; - CollectorResult collector = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (REWRITE_TIME.equals(currentFieldName)) { - rewriteTime = parser.longValue(); - } else if (VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { - vectorOperationsCount = parser.longValue(); - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (QUERY_ARRAY.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - queryProfileResults.add(ProfileResult.fromXContent(parser)); - } - } else if (COLLECTOR.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - collector = CollectorResult.fromXContent(parser); - } - } else { - parser.skipChildren(); - } - } else { - parser.skipChildren(); - } - } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); - } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index d4231c9f7538b..11655a93097cc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -110,7 +110,7 @@ public class ClusterHealthResponsesTests extends AbstractXContentSerializingTest private static final ObjectParser.NamedObjectParser INDEX_PARSER = ( XContentParser parser, Void context, - String index) -> ClusterIndexHealth.innerFromXContent(parser, index); + String index) -> ClusterIndexHealthTests.parseInstance(parser, index); static { // ClusterStateHealth fields diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java index 4980d0f786d84..50f230022b375 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java @@ -8,17 +8,63 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.function.Predicate; +import static java.util.Collections.emptyMap; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SnapshotIndexStatusTests extends AbstractXContentTestCase { + static final ObjectParser.NamedObjectParser PARSER; + static { + ConstructingObjectParser innerParser = new ConstructingObjectParser<>( + "snapshot_index_status", + true, + (Object[] parsedObjects, String index) 
-> { + int i = 0; + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") + List shardStatuses = (List) parsedObjects[i]; + + final Map indexShards; + if (shardStatuses == null || shardStatuses.isEmpty()) { + indexShards = emptyMap(); + } else { + indexShards = Maps.newMapWithExpectedSize(shardStatuses.size()); + for (SnapshotIndexShardStatus shardStatus : shardStatuses) { + indexShards.put(shardStatus.getShardId().getId(), shardStatus); + } + } + return new SnapshotIndexStatus(index, indexShards, shardsStats, stats); + } + ); + innerParser.declareObject( + constructorArg(), + (p, c) -> SnapshotShardsStatsTests.PARSER.apply(p, null), + new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS) + ); + innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS)); + innerParser.declareNamedObjects( + constructorArg(), + SnapshotIndexShardStatus.PARSER, + new ParseField(SnapshotIndexStatus.Fields.SHARDS) + ); + PARSER = ((p, c, name) -> innerParser.apply(p, name)); + } + @Override protected SnapshotIndexStatus createTestInstance() { String index = randomAlphaOfLength(10); @@ -40,7 +86,8 @@ protected Predicate getRandomFieldsExcludeFilter() { protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + SnapshotIndexStatus status = PARSER.parse(parser, null, parser.currentName()); XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); 
return status; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java index 9d4b8d601c63b..a9eacb49798f9 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java @@ -9,12 +9,39 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class SnapshotShardsStatsTests extends AbstractXContentTestCase { + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + SnapshotShardsStats.Fields.SHARDS_STATS, + true, + (Object[] parsedObjects) -> { + int i = 0; + int initializingShards = (int) parsedObjects[i++]; + int startedShards = (int) parsedObjects[i++]; + int finalizingShards = (int) parsedObjects[i++]; + int doneShards = (int) parsedObjects[i++]; + int failedShards = (int) parsedObjects[i++]; + int totalShards = (int) parsedObjects[i]; + return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); + } + ); + static { + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.INITIALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.STARTED)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.FINALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.DONE)); + 
PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.FAILED)); + PARSER.declareInt(constructorArg(), new ParseField(SnapshotShardsStats.Fields.TOTAL)); + } + @Override protected SnapshotShardsStats createTestInstance() { int initializingShards = randomInt(); @@ -28,7 +55,7 @@ protected SnapshotShardsStats createTestInstance() { @Override protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException { - return SnapshotShardsStats.fromXContent(parser); + return PARSER.apply(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java index 9c28930f12382..a32a66a55454f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java @@ -11,20 +11,79 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Strings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.function.Predicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class SnapshotStatusTests extends AbstractChunkedSerializingTestCase { + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshot_status", + true, + (Object[] parsedObjects) -> { + int i = 0; + String name = (String) parsedObjects[i++]; + String repository = (String) parsedObjects[i++]; + String uuid = (String) parsedObjects[i++]; + String rawState = (String) parsedObjects[i++]; + Boolean includeGlobalState = (Boolean) parsedObjects[i++]; + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") + List indices = ((List) parsedObjects[i]); + + Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid)); + SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState); + Map indicesStatus; + List shards; + if (indices == null || indices.isEmpty()) { + indicesStatus = emptyMap(); + shards = emptyList(); + } else { + indicesStatus = Maps.newMapWithExpectedSize(indices.size()); + shards = new ArrayList<>(); + for (SnapshotIndexStatus index : indices) { + indicesStatus.put(index.getIndex(), index); + shards.addAll(index.getShards().values()); + } + } + return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState); + } + ); + static { + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.SNAPSHOT)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.REPOSITORY)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.UUID)); + PARSER.declareString(constructorArg(), new ParseField(SnapshotStatus.STATE)); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField(SnapshotStatus.INCLUDE_GLOBAL_STATE)); + PARSER.declareField( + 
constructorArg(), + SnapshotStats::fromXContent, + new ParseField(SnapshotStats.Fields.STATS), + ObjectParser.ValueType.OBJECT + ); + PARSER.declareObject(constructorArg(), SnapshotShardsStatsTests.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); + PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatusTests.PARSER, new ParseField(SnapshotStatus.INDICES)); + } + public void testToString() throws Exception { SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values()); String uuid = UUIDs.randomBase64UUID(); @@ -180,7 +239,7 @@ protected Predicate getRandomFieldsExcludeFilter() { @Override protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException { - return SnapshotStatus.fromXContent(parser); + return PARSER.parse(parser, null); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java index 21cba892669d0..6b921419c0fd4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java @@ -33,7 +33,7 @@ public class SnapshotsStatusResponseTests extends AbstractChunkedSerializingTest } ); static { - PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots")); + PARSER.declareObjectArray(constructorArg(), SnapshotStatusTests.PARSER, new ParseField("snapshots")); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java index f8d3871fbfa8f..ec56a57aa3a90 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetScriptLanguageResponseTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.Tuple; import org.elasticsearch.script.ScriptLanguagesInfo; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -22,8 +24,33 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class GetScriptLanguageResponseTests extends AbstractXContentSerializingTestCase { + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "script_languages_info", + true, + (a) -> new ScriptLanguagesInfo( + new HashSet<>((List) a[0]), + ((List>>) a[1]).stream().collect(Collectors.toMap(Tuple::v1, Tuple::v2)) + ) + ); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser>, Void> LANGUAGE_CONTEXT_PARSER = + new ConstructingObjectParser<>("language_contexts", true, (m, name) -> new Tuple<>((String) m[0], Set.copyOf((List) m[1]))); + + static { + PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.TYPES_ALLOWED); + PARSER.declareObjectArray(constructorArg(), LANGUAGE_CONTEXT_PARSER, ScriptLanguagesInfo.LANGUAGE_CONTEXTS); + LANGUAGE_CONTEXT_PARSER.declareString(constructorArg(), ScriptLanguagesInfo.LANGUAGE); + LANGUAGE_CONTEXT_PARSER.declareStringArray(constructorArg(), ScriptLanguagesInfo.CONTEXTS); + } + private static int MAX_VALUES = 4; private static final int MIN_LENGTH = 1; private static final int MAX_LENGTH 
= 16; @@ -38,7 +65,7 @@ protected GetScriptLanguageResponse createTestInstance() { @Override protected GetScriptLanguageResponse doParseInstance(XContentParser parser) throws IOException { - return new GetScriptLanguageResponse(ScriptLanguagesInfo.fromXContent(parser)); + return new GetScriptLanguageResponse(PARSER.parse(parser, null)); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java index f0802e471fc38..8cf8a1c064004 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersResponseTests.java @@ -25,7 +25,6 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.action.support.broadcast.BaseBroadcastResponse.declareBroadcastFields; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class ReloadAnalyzersResponseTests extends AbstractBroadcastResponseTestCase { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java index 9ec910e79918c..5df0fa27f1016 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Set; -import static org.elasticsearch.action.support.broadcast.BaseBroadcastResponse.declareBroadcastFields; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; 
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java index 6c45367baf674..7c50ba3beae76 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkItemResponseTests.java @@ -17,12 +17,15 @@ import org.elasticsearch.action.delete.DeleteResponseTests; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.IndexResponseTests; +import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.action.update.UpdateResponseTests; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -43,6 +46,54 @@ public class BulkItemResponseTests extends ESTestCase { + /** + * Parse the output of the {@link DocWriteResponse#innerToXContent(XContentBuilder, ToXContent.Params)} method. + * + * This method is intended to be called by subclasses and must be called multiple times to parse all the information concerning + * {@link DocWriteResponse} objects. It always parses the current token, updates the given parsing context accordingly + * if needed and then immediately returns. 
+ */ + public static void parseInnerToXContent(XContentParser parser, DocWriteResponse.Builder context) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + + if (token.isValue()) { + if (DocWriteResponse._INDEX.equals(currentFieldName)) { + // index uuid and shard id are unknown and can't be parsed back for now. + context.setShardId(new ShardId(new Index(parser.text(), IndexMetadata.INDEX_UUID_NA_VALUE), -1)); + } else if (DocWriteResponse._ID.equals(currentFieldName)) { + context.setId(parser.text()); + } else if (DocWriteResponse._VERSION.equals(currentFieldName)) { + context.setVersion(parser.longValue()); + } else if (DocWriteResponse.RESULT.equals(currentFieldName)) { + String result = parser.text(); + for (DocWriteResponse.Result r : DocWriteResponse.Result.values()) { + if (r.getLowercase().equals(result)) { + context.setResult(r); + break; + } + } + } else if (DocWriteResponse.FORCED_REFRESH.equals(currentFieldName)) { + context.setForcedRefresh(parser.booleanValue()); + } else if (DocWriteResponse._SEQ_NO.equals(currentFieldName)) { + context.setSeqNo(parser.longValue()); + } else if (DocWriteResponse._PRIMARY_TERM.equals(currentFieldName)) { + context.setPrimaryTerm(parser.longValue()); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (DocWriteResponse._SHARDS.equals(currentFieldName)) { + context.setShardInfo(ReplicationResponse.ShardInfo.fromXContent(parser)); + } else { + parser.skipChildren(); // skip potential inner objects for forward compatibility + } + } else if (token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); // skip potential inner arrays for forward compatibility + } + } + public void testBulkItemResponseShouldContainTypeInV7CompatibilityMode() throws IOException { BulkItemResponse bulkItemResponse = BulkItemResponse.success( 
randomInt(), @@ -192,7 +243,7 @@ public static BulkItemResponse itemResponseFromXContent(XContentParser parser, i if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) { final IndexResponse.Builder indexResponseBuilder = new IndexResponse.Builder(); builder = indexResponseBuilder; - itemParser = indexParser -> DocWriteResponse.parseInnerToXContent(indexParser, indexResponseBuilder); + itemParser = indexParser -> parseInnerToXContent(indexParser, indexResponseBuilder); } else if (opType == DocWriteRequest.OpType.UPDATE) { final UpdateResponse.Builder updateResponseBuilder = new UpdateResponse.Builder(); builder = updateResponseBuilder; @@ -201,7 +252,7 @@ public static BulkItemResponse itemResponseFromXContent(XContentParser parser, i } else if (opType == DocWriteRequest.OpType.DELETE) { final DeleteResponse.Builder deleteResponseBuilder = new DeleteResponse.Builder(); builder = deleteResponseBuilder; - itemParser = deleteParser -> DocWriteResponse.parseInnerToXContent(deleteParser, deleteResponseBuilder); + itemParser = deleteParser -> parseInnerToXContent(deleteParser, deleteResponseBuilder); } else { throwUnknownField(currentFieldName, parser); } diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java index 937ac2d26ebb9..b22a30b533dd2 100644 --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteResponseTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.action.delete; -import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -119,7 +119,7 @@ private static DeleteResponse parseInstance(XContentParser 
parser) throws IOExce DeleteResponse.Builder context = new DeleteResponse.Builder(); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - DocWriteResponse.parseInnerToXContent(parser, context); + BulkItemResponseTests.parseInnerToXContent(parser, context); } return context.build(); } diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java index c8a8c3853601d..878c35b449366 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexResponseTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.index; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -126,7 +127,7 @@ private static IndexResponse parseInstanceFromXContent(XContentParser parser) th ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); IndexResponse.Builder context = new IndexResponse.Builder(); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - DocWriteResponse.parseInnerToXContent(parser, context); + BulkItemResponseTests.parseInnerToXContent(parser, context); } return context.build(); } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java index d35162287e3ac..0eefeb87d3e02 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.update; import org.elasticsearch.action.DocWriteResponse; +import 
org.elasticsearch.action.bulk.BulkItemResponseTests; import org.elasticsearch.action.index.IndexResponseTests; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.common.Strings; @@ -214,7 +215,7 @@ public static void parseXContentFields(XContentParser parser, UpdateResponse.Bui context.setGetResult(GetResult.fromXContentEmbedded(parser)); } } else { - DocWriteResponse.parseInnerToXContent(parser, context); + BulkItemResponseTests.parseInnerToXContent(parser, context); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java index 637a18547b1b2..48d28462231a0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterIndexHealthTests.java @@ -12,8 +12,12 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTableGenerator; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -21,12 +25,16 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.function.Predicate; import java.util.regex.Pattern; +import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import 
static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.CoreMatchers.equalTo; public class ClusterIndexHealthTests extends AbstractXContentSerializingTestCase { @@ -106,7 +114,7 @@ protected ClusterIndexHealth doParseInstance(XContentParser parser) throws IOExc XContentParser.Token token = parser.nextToken(); ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); String index = parser.currentName(); - ClusterIndexHealth parsed = ClusterIndexHealth.innerFromXContent(parser, index); + ClusterIndexHealth parsed = parseInstance(parser, index); ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); return parsed; } @@ -288,4 +296,66 @@ protected ClusterIndexHealth mutateInstance(ClusterIndexHealth instance) { throw new UnsupportedOperationException(); } } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_index_health", + true, + (parsedObjects, index) -> { + int i = 0; + int numberOfShards = (int) parsedObjects[i++]; + int numberOfReplicas = (int) parsedObjects[i++]; + int activeShards = (int) parsedObjects[i++]; + int relocatingShards = (int) parsedObjects[i++]; + int initializingShards = (int) parsedObjects[i++]; + int unassignedShards = (int) parsedObjects[i++]; + int activePrimaryShards = (int) parsedObjects[i++]; + String statusStr = (String) parsedObjects[i++]; + ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); + @SuppressWarnings("unchecked") + List shardList = (List) parsedObjects[i]; + final Map shards; + if (shardList == null || shardList.isEmpty()) { + shards = emptyMap(); + } else { + shards = Maps.newMapWithExpectedSize(shardList.size()); + for (ClusterShardHealth shardHealth : shardList) { + shards.put(shardHealth.getShardId(), shardHealth); + } + } + return new ClusterIndexHealth( + index, + numberOfShards, + numberOfReplicas, + activeShards, + relocatingShards, + initializingShards, + 
unassignedShards, + activePrimaryShards, + status, + shards + ); + } + ); + + public static final ObjectParser.NamedObjectParser SHARD_PARSER = ( + XContentParser p, + String indexIgnored, + String shardId) -> ClusterShardHealthTests.PARSER.apply(p, Integer.valueOf(shardId)); + + static { + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.NUMBER_OF_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.NUMBER_OF_REPLICAS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.ACTIVE_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.RELOCATING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.INITIALIZING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.UNASSIGNED_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterIndexHealth.ACTIVE_PRIMARY_SHARDS)); + PARSER.declareString(constructorArg(), new ParseField(ClusterIndexHealth.STATUS)); + // Can be absent if LEVEL == 'indices' or 'cluster' + PARSER.declareNamedObjects(optionalConstructorArg(), SHARD_PARSER, new ParseField(ClusterIndexHealth.SHARDS)); + } + + public static ClusterIndexHealth parseInstance(XContentParser parser, String index) { + return PARSER.apply(parser, index); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java index ce7c366ff30e6..1e1eacba183d2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterShardHealthTests.java @@ -9,17 +9,61 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Arrays; import java.util.function.Predicate; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ClusterShardHealthTests extends AbstractXContentSerializingTestCase { + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "cluster_shard_health", + true, + (parsedObjects, shardId) -> { + int i = 0; + boolean primaryActive = (boolean) parsedObjects[i++]; + int activeShards = (int) parsedObjects[i++]; + int relocatingShards = (int) parsedObjects[i++]; + int initializingShards = (int) parsedObjects[i++]; + int unassignedShards = (int) parsedObjects[i++]; + String statusStr = (String) parsedObjects[i]; + ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr); + return new ClusterShardHealth( + shardId, + status, + activeShards, + relocatingShards, + initializingShards, + unassignedShards, + primaryActive + ); + } + ); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField(ClusterShardHealth.PRIMARY_ACTIVE)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.ACTIVE_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.RELOCATING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.INITIALIZING_SHARDS)); + PARSER.declareInt(constructorArg(), new ParseField(ClusterShardHealth.UNASSIGNED_SHARDS)); + PARSER.declareString(constructorArg(), new ParseField(ClusterShardHealth.STATUS)); + } + @Override protected ClusterShardHealth doParseInstance(XContentParser parser) throws IOException { - return ClusterShardHealth.fromXContent(parser); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + XContentParser.Token token = parser.nextToken(); + 
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String shardIdStr = parser.currentName(); + ClusterShardHealth parsed = PARSER.apply(parser, Integer.valueOf(shardIdStr)); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + return parsed; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java index c89edb29b5058..4855a043c565a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResultTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.profile; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.profile.query.QueryProfileShardResultTests; @@ -48,7 +49,7 @@ protected Reader instanceReader() { @Override protected SearchProfileDfsPhaseResult doParseInstance(XContentParser parser) throws IOException { - return SearchProfileDfsPhaseResult.fromXContent(parser); + return SearchResponseUtils.parseProfileDfsPhaseResult(parser); } public void testCombineQueryProfileShardResults() { diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java index f28425172ead5..56520c0c6d033 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.profile.query; import 
org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileResultTests; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -51,7 +52,7 @@ protected QueryProfileShardResult mutateInstance(QueryProfileShardResult instanc @Override protected QueryProfileShardResult doParseInstance(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - QueryProfileShardResult result = QueryProfileShardResult.fromXContent(parser); + QueryProfileShardResult result = SearchResponseUtils.parseQueryProfileShardResult(parser); ensureExpectedToken(null, parser.nextToken(), parser); return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 71837ccf14387..8831149fec905 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -24,10 +24,12 @@ import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult; +import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.InstantiatingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -40,6 +42,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; 
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public enum SearchResponseUtils { ; @@ -439,7 +442,7 @@ private static void parseProfileResultsEntry(XContentParser parser, Map PROFILE_DFS_PHASE_RESULT_PARSER; + + static { + InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( + "search_profile_dfs_phase_result", + true, + SearchProfileDfsPhaseResult.class + ); + parser.declareObject(optionalConstructorArg(), (p, c) -> ProfileResult.fromXContent(p), SearchProfileDfsPhaseResult.STATISTICS); + parser.declareObjectArray(optionalConstructorArg(), (p, c) -> parseQueryProfileShardResult(p), SearchProfileDfsPhaseResult.KNN); + PROFILE_DFS_PHASE_RESULT_PARSER = parser.build(); + } + + public static SearchProfileDfsPhaseResult parseProfileDfsPhaseResult(XContentParser parser) throws IOException { + return PROFILE_DFS_PHASE_RESULT_PARSER.parse(parser, null); + } + + public static QueryProfileShardResult parseQueryProfileShardResult(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); + String currentFieldName = null; + List queryProfileResults = new ArrayList<>(); + long rewriteTime = 0; + Long vectorOperationsCount = null; + CollectorResult collector = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (QueryProfileShardResult.REWRITE_TIME.equals(currentFieldName)) { + rewriteTime = parser.longValue(); + } else if (QueryProfileShardResult.VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { + vectorOperationsCount = parser.longValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if 
(QueryProfileShardResult.QUERY_ARRAY.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + queryProfileResults.add(ProfileResult.fromXContent(parser)); + } + } else if (QueryProfileShardResult.COLLECTOR.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + collector = CollectorResult.fromXContent(parser); + } + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java index 5f720eededf02..751eed222ee7a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBroadcastResponseTestCase.java @@ -10,12 +10,15 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -24,12 +27,38 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static 
org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; public abstract class AbstractBroadcastResponseTestCase extends AbstractXContentTestCase { + private static final ParseField _SHARDS_FIELD = new ParseField("_shards"); + private static final ParseField TOTAL_FIELD = new ParseField("total"); + private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful"); + private static final ParseField FAILED_FIELD = new ParseField("failed"); + private static final ParseField FAILURES_FIELD = new ParseField("failures"); + + @SuppressWarnings("unchecked") + public static void declareBroadcastFields(ConstructingObjectParser PARSER) { + ConstructingObjectParser shardsParser = new ConstructingObjectParser<>( + "_shards", + true, + arg -> new BaseBroadcastResponse((int) arg[0], (int) arg[1], (int) arg[2], (List) arg[3]) + ); + shardsParser.declareInt(constructorArg(), TOTAL_FIELD); + shardsParser.declareInt(constructorArg(), SUCCESSFUL_FIELD); + shardsParser.declareInt(constructorArg(), FAILED_FIELD); + shardsParser.declareObjectArray( + optionalConstructorArg(), + (p, c) -> DefaultShardOperationFailedException.fromXContent(p), + FAILURES_FIELD + ); + PARSER.declareObject(constructorArg(), shardsParser, _SHARDS_FIELD); + } + @Override protected T createTestInstance() { int totalShards = randomIntBetween(1, 10); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 307daddd17c37..6905ee391a6eb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.query.QueryBuilder; import 
org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.AbstractBroadcastResponseTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; @@ -1312,7 +1313,7 @@ protected static BroadcastResponse refresh(String index) throws IOException { ); static { - BaseBroadcastResponse.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); + AbstractBroadcastResponseTestCase.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); } protected static BroadcastResponse refresh(RestClient client, String index) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java index 43dc92857551a..d89732cb3b177 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TermsEnumResponse.java @@ -7,22 +7,15 @@ package org.elasticsearch.xpack.core.termsenum.action; import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * The response 
of the _terms_enum action. */ @@ -31,28 +24,6 @@ public class TermsEnumResponse extends BroadcastResponse { public static final String TERMS_FIELD = "terms"; public static final String COMPLETE_FIELD = "complete"; - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "term_enum_results", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new TermsEnumResponse( - (List) arg[1], - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()), - (Boolean) arg[2] - ); - } - ); - static { - declareBroadcastFields(PARSER); - PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TERMS_FIELD)); - PARSER.declareBoolean(optionalConstructorArg(), new ParseField(COMPLETE_FIELD)); - } - private final List terms; private boolean complete; @@ -106,7 +77,4 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.field(COMPLETE_FIELD, complete); } - public static TermsEnumResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java index a31c44a165cdf..1804de134c8fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/termsenum/TermsEnumResponseTests.java @@ -8,19 +8,48 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; import 
org.elasticsearch.test.AbstractBroadcastResponseTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.termsenum.action.TermsEnumResponse; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class TermsEnumResponseTests extends AbstractBroadcastResponseTestCase { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "term_enum_results", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new TermsEnumResponse( + (List) arg[1], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()), + (Boolean) arg[2] + ); + } + ); + + static { + AbstractBroadcastResponseTestCase.declareBroadcastFields(PARSER); + PARSER.declareStringArray(optionalConstructorArg(), new ParseField(TermsEnumResponse.TERMS_FIELD)); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField(TermsEnumResponse.COMPLETE_FIELD)); + } + protected static List getRandomTerms() { int termCount = randomIntBetween(0, 100); Set uniqueTerms = Sets.newHashSetWithExpectedSize(termCount); @@ -48,7 +77,7 @@ private static TermsEnumResponse createRandomTermEnumResponse() { @Override protected TermsEnumResponse doParseInstance(XContentParser parser) throws IOException { - return TermsEnumResponse.fromXContent(parser); + return PARSER.apply(parser, null); } @Override From cb1b8fceaecc22d061eb6aabd7dd36dae20b778d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 26 Mar 2024 12:17:16 -0400 Subject: [PATCH 180/214] Fix field caps and field level security (#106731) If you perform a `_field_caps` request on 
two indices with the same mapping but *different* field level security settings we were returning incorrect results. In particular, we'd return return whatever fields were visible in one of the indices. It's random which one we'd return. --- docs/changelog/106731.yaml | 5 ++ .../search/fieldcaps/FieldCapabilitiesIT.java | 21 ++++- .../fieldcaps/FieldCapabilitiesFetcher.java | 8 +- .../cluster/metadata/Metadata.java | 7 +- .../index/mapper/MapperRegistry.java | 8 +- .../elasticsearch/indices/IndicesModule.java | 21 ++--- .../elasticsearch/indices/IndicesService.java | 4 +- .../elasticsearch/plugins/FieldPredicate.java | 90 +++++++++++++++++++ .../elasticsearch/plugins/MapperPlugin.java | 22 ++--- .../FieldCapabilitiesFilterTests.java | 31 +++++-- .../cluster/metadata/MetadataTests.java | 3 +- .../mapper/FieldFilterMapperPluginTests.java | 21 ++++- .../indices/IndicesModuleTests.java | 90 +++++++++++++++---- .../index/mapper/MockFieldFilterPlugin.java | 22 ++++- .../permission/AutomatonFieldPredicate.java | 78 ++++++++++++++++ .../authz/permission/FieldPermissions.java | 10 +++ .../core/LocalStateCompositeXPackPlugin.java | 5 +- .../AutomatonFieldPredicateTests.java | 36 ++++++++ .../xpack/esql/EsqlSecurityIT.java | 82 ++++++++++++++++- .../src/javaRestTest/resources/roles.yml | 13 +++ .../integration/FieldLevelSecurityTests.java | 66 +++++++++++++- .../xpack/security/Security.java | 13 +-- .../xpack/security/SecurityTests.java | 12 +-- 23 files changed, 581 insertions(+), 87 deletions(-) create mode 100644 docs/changelog/106731.yaml create mode 100644 server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java diff --git a/docs/changelog/106731.yaml b/docs/changelog/106731.yaml new file 
mode 100644 index 0000000000000..0d8e16a8f9616 --- /dev/null +++ b/docs/changelog/106731.yaml @@ -0,0 +1,5 @@ +pr: 106731 +summary: Fix field caps and field level security +area: Security +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index 282e29866a699..64f04d46a9a90 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; @@ -78,7 +79,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.IntStream; import static java.util.Collections.singletonList; @@ -809,8 +809,23 @@ public Map getMetadataMappers() { } @Override - public Function> getFieldFilter() { - return index -> field -> field.equals("playlist") == false; + public Function getFieldFilter() { + return index -> new FieldPredicate() { + @Override + public boolean test(String field) { + return field.equals("playlist") == false; + } + + @Override + public String modifyHash(String hash) { + return "not-playlist:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 
6028a6e21ecff..51cb05f981177 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; @@ -123,14 +124,15 @@ private FieldCapabilitiesIndexResponse doFetch( final String shardUuid = indexService.getShard(shardId.getId()).getShardUuid(); indexMappingHash = mapping == null ? shardUuid : shardUuid + mapping.getSha256(); } + FieldPredicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); if (indexMappingHash != null) { + indexMappingHash = fieldPredicate.modifyHash(indexMappingHash); final Map existing = indexMappingHashToResponses.get(indexMappingHash); if (existing != null) { return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, existing, true); } } task.ensureNotCancelled(); - Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); final Map responseMap = retrieveFieldCaps( searchExecutionContext, fieldNameFilter, @@ -151,7 +153,7 @@ static Map retrieveFieldCaps( Predicate fieldNameFilter, String[] filters, String[] types, - Predicate indexFieldfilter, + FieldPredicate fieldPredicate, IndexShard indexShard, boolean includeEmptyFields ) { @@ -169,7 +171,7 @@ static Map retrieveFieldCaps( } MappedFieldType ft = entry.getValue(); if ((includeEmptyFields || ft.fieldHasValue(fieldInfos)) - && (indexFieldfilter.test(ft.name()) || context.isMetadataField(ft.name())) + && (fieldPredicate.test(ft.name()) || context.isMetadataField(ft.name())) && (filter == null || filter.test(ft))) { 
IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( field, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index b450251ff7e3f..f424861c5b7ff 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.Transports; @@ -921,7 +922,7 @@ private void findAliasInfo(final String[] aliases, final String[] possibleMatche */ public Map findMappings( String[] concreteIndices, - Function> fieldFilter, + Function> fieldFilter, Runnable onNextIndex ) { assert Transports.assertNotTransportThread("decompressing mappings is too expensive for a transport thread"); @@ -974,7 +975,7 @@ private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Pre if (mappingMetadata == null) { return MappingMetadata.EMPTY_MAPPINGS; } - if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + if (fieldPredicate == FieldPredicate.ACCEPT_ALL) { return mappingMetadata; } Map sourceAsMap = XContentHelper.convertToMap(mappingMetadata.source().compressedReference(), true).v2(); @@ -997,7 +998,7 @@ private static MappingMetadata filterFields(MappingMetadata mappingMetadata, Pre @SuppressWarnings("unchecked") private static boolean filterFields(String currentPath, Map fields, Predicate fieldPredicate) { - assert fieldPredicate != MapperPlugin.NOOP_FIELD_PREDICATE; + assert fieldPredicate != FieldPredicate.ACCEPT_ALL; Iterator> entryIterator = fields.entrySet().iterator(); while (entryIterator.hasNext()) { Map.Entry entry = 
entryIterator.next(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java index dcf24c9a61bbd..aa2a7ce2f3996 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java @@ -10,13 +10,13 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; import java.util.function.Function; -import java.util.function.Predicate; /** * A registry for all field mappers. @@ -29,13 +29,13 @@ public final class MapperRegistry { private final Map metadataMapperParsers7x; private final Map metadataMapperParsers6x; private final Map metadataMapperParsers5x; - private final Function> fieldFilter; + private final Function fieldFilter; public MapperRegistry( Map mapperParsers, Map runtimeFieldParsers, Map metadataMapperParsers, - Function> fieldFilter + Function fieldFilter ) { this.mapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(mapperParsers)); this.runtimeFieldParsers = runtimeFieldParsers; @@ -92,7 +92,7 @@ public Map getMetadataMapperParsers(Inde * {@link MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be returned by get mappings, * get index, get field mappings and field capabilities API. 
*/ - public Function> getFieldFilter() { + public Function getFieldFilter() { return fieldFilter; } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 048d9adb8e7e3..b17777fc5a91e 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.store.IndicesStore; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -83,7 +84,6 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; /** * Configures classes and services that are shared by indices on each node. @@ -307,18 +307,15 @@ public static Set getBuiltInMetadataFields() { return builtInMetadataFields; } - private static Function> getFieldFilter(List mapperPlugins) { - Function> fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; + private static Function getFieldFilter(List mapperPlugins) { + Function fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; for (MapperPlugin mapperPlugin : mapperPlugins) { fieldFilter = and(fieldFilter, mapperPlugin.getFieldFilter()); } return fieldFilter; } - private static Function> and( - Function> first, - Function> second - ) { + private static Function and(Function first, Function second) { // the purpose of this method is to not chain no-op field predicates, so that we can easily find out when no plugins plug in // a field filter, hence skip the mappings filtering part as a whole, as it requires parsing mappings into a map. 
if (first == MapperPlugin.NOOP_FIELD_FILTER) { @@ -328,15 +325,15 @@ private static Function> and( return first; } return index -> { - Predicate firstPredicate = first.apply(index); - Predicate secondPredicate = second.apply(index); - if (firstPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + FieldPredicate firstPredicate = first.apply(index); + FieldPredicate secondPredicate = second.apply(index); + if (firstPredicate == FieldPredicate.ACCEPT_ALL) { return secondPredicate; } - if (secondPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + if (secondPredicate == FieldPredicate.ACCEPT_ALL) { return firstPredicate; } - return firstPredicate.and(secondPredicate); + return new FieldPredicate.And(firstPredicate, secondPredicate); }; } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 3319b29df6dfa..026a20415aa91 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -128,6 +128,7 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.store.CompositeIndexFoldersDeletionListener; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesService; @@ -168,7 +169,6 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; -import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.Collections.emptyList; @@ -1756,7 +1756,7 @@ public void clearIndexShardCache(ShardId shardId, boolean queryCache, boolean fi * {@link org.elasticsearch.plugins.MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be * returned by get mappings, get index, 
get field mappings and field capabilities API. */ - public Function> getFieldFilter() { + public Function getFieldFilter() { return mapperRegistry.getFieldFilter(); } diff --git a/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java b/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java new file mode 100644 index 0000000000000..32692b9740f91 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/FieldPredicate.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.plugins; + +import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.cluster.metadata.MappingMetadata; + +import java.util.function.Predicate; + +/** + * Filter for visible fields. + */ +public interface FieldPredicate extends Accountable, Predicate { + /** + * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index + * get field mappings and field capabilities API will return every field that's present in the mappings. + */ + FieldPredicate ACCEPT_ALL = new FieldPredicate() { + @Override + public boolean test(String field) { + return true; + } + + @Override + public String modifyHash(String hash) { + return hash; + } + + @Override + public long ramBytesUsed() { + return 0; // Shared + } + + @Override + public String toString() { + return "accept all"; + } + }; + + /** + * Should this field be included? + */ + @Override + boolean test(String field); + + /** + * Modify the {@link MappingMetadata#getSha256} to track any filtering this predicate + * has performed on the list of fields. 
+ */ + String modifyHash(String hash); + + class And implements FieldPredicate { + private static final long SHALLOW_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(And.class); + + private final FieldPredicate first; + private final FieldPredicate second; + + public And(FieldPredicate first, FieldPredicate second) { + this.first = first; + this.second = second; + } + + @Override + public boolean test(String field) { + return first.test(field) && second.test(field); + } + + @Override + public String modifyHash(String hash) { + return second.modifyHash(first.modifyHash(hash)); + } + + @Override + public long ramBytesUsed() { + return SHALLOW_RAM_BYTES_USED + first.ramBytesUsed() + second.ramBytesUsed(); + } + + @Override + public String toString() { + return first + " then " + second; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java index 401c014488f88..45f04487886d3 100644 --- a/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/MapperPlugin.java @@ -8,7 +8,6 @@ package org.elasticsearch.plugins; -import org.elasticsearch.core.Predicates; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.RuntimeField; @@ -16,7 +15,6 @@ import java.util.Collections; import java.util.Map; import java.util.function.Function; -import java.util.function.Predicate; /** * An extension point for {@link Plugin} implementations to add custom mappers @@ -62,19 +60,23 @@ default Map getMetadataMappers() { * get index, get field mappings and field capabilities API. Useful to filter the fields that such API return. The predicate receives * the field name as input argument and should return true to show the field and false to hide it. 
*/ - default Function> getFieldFilter() { + default Function getFieldFilter() { return NOOP_FIELD_FILTER; } - /** - * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index - * get field mappings and field capabilities API will return every field that's present in the mappings. - */ - Predicate NOOP_FIELD_PREDICATE = Predicates.always(); - /** * The default field filter applied, which doesn't filter anything. That means that by default get mappings, get index * get field mappings and field capabilities API will return every field that's present in the mappings. */ - Function> NOOP_FIELD_FILTER = index -> NOOP_FIELD_PREDICATE; + Function NOOP_FIELD_FILTER = new Function<>() { + @Override + public FieldPredicate apply(String index) { + return FieldPredicate.ACCEPT_ALL; + } + + @Override + public String toString() { + return "accept all"; + } + }; } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java index ffdc7b9ca7652..478012567c1ae 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java @@ -14,10 +14,10 @@ import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.plugins.FieldPredicate; import java.io.IOException; import java.util.Map; -import java.util.function.Predicate; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -46,7 +46,7 @@ public void testExcludeNestedFields() throws IOException { s -> true, new String[] { "-nested" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -74,7 +74,7 @@ public void 
testMetadataFilters() throws IOException { s -> true, new String[] { "+metadata" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -87,7 +87,7 @@ public void testMetadataFilters() throws IOException { s -> true, new String[] { "-metadata" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -120,7 +120,7 @@ public void testExcludeMultifields() throws IOException { s -> true, new String[] { "-multifield" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -151,7 +151,7 @@ public void testDontIncludeParentInfo() throws IOException { s -> true, new String[] { "-parent" }, Strings.EMPTY_ARRAY, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); @@ -171,7 +171,22 @@ public void testSecurityFilter() throws IOException { } } """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - Predicate securityFilter = f -> f.startsWith("permitted"); + FieldPredicate securityFilter = new FieldPredicate() { + @Override + public boolean test(String field) { + return field.startsWith("permitted"); + } + + @Override + public String modifyHash(String hash) { + return "only-permitted:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; { Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( @@ -223,7 +238,7 @@ public void testFieldTypeFiltering() throws IOException { s -> true, Strings.EMPTY_ARRAY, new String[] { "text", "keyword" }, - f -> true, + FieldPredicate.ACCEPT_ALL, getMockIndexShard(), true ); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 1e35a40dedc17..955d7d2de6882 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -42,6 
+42,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; @@ -786,7 +787,7 @@ public void testFindMappingsWithFilters() throws IOException { if (index.equals("index2")) { return Predicates.never(); } - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; }, Metadata.ON_NEXT_INDEX_FIND_MAPPINGS_NOOP); assertIndexMappingsNoFields(mappings, "index2"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java index 2b8be2882c409..ce406b604ba62 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -32,7 +33,6 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.MetadataTests.assertLeafs; import static org.elasticsearch.cluster.metadata.MetadataTests.assertMultiField; @@ -246,8 +246,23 @@ private static void assertNotFiltered(MappingMetadata mappingMetadata) { public static class FieldFilterPlugin extends Plugin implements MapperPlugin { @Override - public Function> 
getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.endsWith("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> false == index.equals("filtered") ? FieldPredicate.ACCEPT_ALL : new FieldPredicate() { + @Override + public boolean test(String field) { + return field.endsWith("visible"); + } + + @Override + public String modifyHash(String hash) { + return "only-visible:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index cade1e66c7fc7..0216bad7cf7a3 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesRoutingHashFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -44,11 +45,11 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; -import java.util.function.Predicate; import static org.elasticsearch.test.LambdaMatchers.falseWith; import static org.elasticsearch.test.LambdaMatchers.trueWith; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; @@ -246,24 +247,24 @@ public void testGetFieldFilter() { List mapperPlugins = List.of(new MapperPlugin() { }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("hidden_index") ? 
field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("hidden_index") ? HIDDEN_INDEX : FieldPredicate.ACCEPT_ALL; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> field -> field.equals("hidden_field") == false; + public Function getFieldFilter() { + return index -> HIDDEN_FIELD; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("filtered") ? ONLY_VISIBLE : FieldPredicate.ACCEPT_ALL; } }); IndicesModule indicesModule = new IndicesModule(mapperPlugins); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - Function> fieldFilter = mapperRegistry.getFieldFilter(); + Function fieldFilter = mapperRegistry.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); assertThat(fieldFilter.apply("hidden_index"), falseWith(randomAlphaOfLengthBetween(3, 5))); @@ -276,6 +277,10 @@ public Function> getFieldFilter() { assertThat(fieldFilter.apply("hidden_index"), falseWith("visible")); assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)), trueWith("visible")); assertThat(fieldFilter.apply("hidden_index"), falseWith("hidden_field")); + + assertThat(fieldFilter.apply("filtered").modifyHash("hash"), equalTo("only-visible:hide-field:hash")); + assertThat(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).modifyHash("hash"), equalTo("hide-field:hash")); + assertThat(fieldFilter.apply("hidden_index").modifyHash("hash"), equalTo("hide-field:hidden:hash")); } public void testDefaultFieldFilterIsNoOp() { @@ -286,7 +291,7 @@ public void testDefaultFieldFilterIsNoOp() { }); } IndicesModule indicesModule = new IndicesModule(mapperPlugins); - Function> fieldFilter = indicesModule.getMapperRegistry().getFieldFilter(); + Function 
fieldFilter = indicesModule.getMapperRegistry().getFieldFilter(); assertSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); } @@ -294,21 +299,72 @@ public void testNoOpFieldPredicate() { List mapperPlugins = Arrays.asList(new MapperPlugin() { }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("hidden_index") ? field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("hidden_index") ? HIDDEN_INDEX : FieldPredicate.ACCEPT_ALL; } }, new MapperPlugin() { @Override - public Function> getFieldFilter() { - return index -> index.equals("filtered") ? field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + public Function getFieldFilter() { + return index -> index.equals("filtered") ? ONLY_VISIBLE : FieldPredicate.ACCEPT_ALL; } }); IndicesModule indicesModule = new IndicesModule(mapperPlugins); MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); - Function> fieldFilter = mapperRegistry.getFieldFilter(); - assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 7))); - assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("hidden_index")); - assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("filtered")); + Function fieldFilter = mapperRegistry.getFieldFilter(); + assertSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply(randomAlphaOfLengthBetween(3, 7))); + assertNotSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("hidden_index")); + assertNotSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("filtered")); } + + private static final FieldPredicate HIDDEN_INDEX = new FieldPredicate() { + @Override + public boolean test(String field) { + return false; + } + + @Override + public String modifyHash(String hash) { + return "hidden:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; + + private static final FieldPredicate HIDDEN_FIELD = new 
FieldPredicate() { + @Override + public boolean test(String field) { + return false == field.equals("hidden_field"); + } + + @Override + public String modifyHash(String hash) { + return "hide-field:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; + + private static final FieldPredicate ONLY_VISIBLE = new FieldPredicate() { + @Override + public boolean test(String field) { + return field.equals("visible"); + } + + @Override + public String modifyHash(String hash) { + return "only-visible:" + hash; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java index 16cb0b4656fcf..61fc190e4952d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MockFieldFilterPlugin.java @@ -8,18 +8,32 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.core.Predicates; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import java.util.function.Function; -import java.util.function.Predicate; public class MockFieldFilterPlugin extends Plugin implements MapperPlugin { @Override - public Function> getFieldFilter() { + public Function getFieldFilter() { // this filter doesn't filter any field out, but it's used to exercise the code path executed when the filter is not no-op - return index -> Predicates.always(); + return index -> new FieldPredicate() { + @Override + public boolean test(String field) { + return true; + } + + @Override + public String modifyHash(String hash) { + return hash + ":includeall"; + } + + @Override + public long ramBytesUsed() { + return 0; + } + }; } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java new file mode 100644 index 0000000000000..90ee353b46eaa --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicate.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.apache.lucene.util.automaton.Transition; +import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.plugins.FieldPredicate; + +import java.io.IOException; +import java.security.DigestOutputStream; +import java.security.MessageDigest; +import java.util.Base64; + +/** + * An implementation of {@link FieldPredicate} which matches fields + * against an {@link Automaton}. 
+ */ +class AutomatonFieldPredicate implements FieldPredicate { + private final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(AutomatonFieldPredicate.class); + + private final String automatonHash; + private final CharacterRunAutomaton automaton; + + AutomatonFieldPredicate(Automaton originalAutomaton, CharacterRunAutomaton automaton) { + this.automatonHash = sha256(originalAutomaton); + this.automaton = automaton; + } + + @Override + public boolean test(String field) { + return automaton.run(field); + } + + @Override + public String modifyHash(String hash) { + return hash + ":" + automatonHash; + } + + @Override + public long ramBytesUsed() { + return SHALLOW_SIZE + RamUsageEstimator.sizeOf(automatonHash); // automaton itself is a shallow copy so not counted here + } + + private static String sha256(Automaton automaton) { + MessageDigest messageDigest = MessageDigests.sha256(); + try { + StreamOutput out = new OutputStreamStreamOutput(new DigestOutputStream(Streams.NULL_OUTPUT_STREAM, messageDigest)); + Transition t = new Transition(); + for (int state = 0; state < automaton.getNumStates(); state++) { + out.writeInt(state); + out.writeBoolean(automaton.isAccept(state)); + + int numTransitions = automaton.initTransition(state, t); + for (int i = 0; i < numTransitions; ++i) { + automaton.getNextTransition(t); + out.writeInt(t.dest); + out.writeInt(t.min); + out.writeInt(t.max); + } + } + } catch (IOException bogus) { + // cannot happen + throw new Error(bogus); + } + return Base64.getEncoder().encodeToString(messageDigest.digest()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 8f2088f55ade6..f3c2d9f62e40f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.xpack.core.security.authz.accesscontrol.FieldSubsetReader; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup; import org.elasticsearch.xpack.core.security.authz.support.SecurityQueryTemplateEvaluator.DlsQueryEvaluationContext; @@ -67,6 +68,7 @@ public final class FieldPermissions implements Accountable, CacheKey { private final CharacterRunAutomaton permittedFieldsAutomaton; private final boolean permittedFieldsAutomatonIsTotal; private final Automaton originalAutomaton; + private final FieldPredicate fieldPredicate; private final long ramBytesUsed; @@ -106,6 +108,9 @@ private FieldPermissions(List fieldPermissionsDefini this.permittedFieldsAutomaton = new CharacterRunAutomaton(permittedFieldsAutomaton); // we cache the result of isTotal since this might be a costly operation this.permittedFieldsAutomatonIsTotal = Operations.isTotal(permittedFieldsAutomaton); + this.fieldPredicate = permittedFieldsAutomatonIsTotal + ? 
FieldPredicate.ACCEPT_ALL + : new AutomatonFieldPredicate(originalAutomaton, this.permittedFieldsAutomaton); long ramBytesUsed = BASE_FIELD_PERM_DEF_BYTES; ramBytesUsed += this.fieldPermissionsDefinitions.stream() @@ -113,6 +118,7 @@ private FieldPermissions(List fieldPermissionsDefini .sum(); ramBytesUsed += permittedFieldsAutomaton.ramBytesUsed(); ramBytesUsed += runAutomatonRamBytesUsed(permittedFieldsAutomaton); + ramBytesUsed += fieldPredicate.ramBytesUsed(); this.ramBytesUsed = ramBytesUsed; } @@ -220,6 +226,10 @@ public boolean grantsAccessTo(String fieldName) { return permittedFieldsAutomatonIsTotal || permittedFieldsAutomaton.run(fieldName); } + public FieldPredicate fieldPredicate() { + return fieldPredicate; + } + public List getFieldPermissionsDefinitions() { return fieldPermissionsDefinitions; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index bd267d19398b0..918976c0d3db8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -67,6 +67,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; @@ -454,8 +455,8 @@ public void onIndexModule(IndexModule indexModule) { } @Override - public Function> getFieldFilter() { - List>> items = filterPlugins(MapperPlugin.class).stream() + public Function getFieldFilter() { + List> items = filterPlugins(MapperPlugin.class).stream() .map(p -> p.getFieldFilter()) .filter(p -> p.equals(NOOP_FIELD_FILTER) == false) 
.toList(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java new file mode 100644 index 0000000000000..d62cbb7dbab6b --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/AutomatonFieldPredicateTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.authz.permission; + +import org.apache.lucene.util.automaton.Automata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.CharacterRunAutomaton; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class AutomatonFieldPredicateTests extends ESTestCase { + public void testMatching() { + String str = randomAlphaOfLength(10); + Automaton a = Automata.makeString(str); + AutomatonFieldPredicate pred = new AutomatonFieldPredicate(a, new CharacterRunAutomaton(a)); + assertTrue(pred.test(str)); + assertFalse(pred.test(str + randomAlphaOfLength(1))); + } + + public void testHash() { + Automaton a = Automata.makeString("a"); + AutomatonFieldPredicate predA = new AutomatonFieldPredicate(a, new CharacterRunAutomaton(a)); + + Automaton b = Automata.makeString("b"); + AutomatonFieldPredicate predB = new AutomatonFieldPredicate(b, new CharacterRunAutomaton(b)); + + assertThat(predA.modifyHash("a"), not(equalTo(predB.modifyHash("a")))); + } +} diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java 
b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index bb8163915c1c4..2dd64cf02446b 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql; import org.apache.http.HttpStatus; +import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -31,6 +32,9 @@ import java.util.Locale; import java.util.Map; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class EsqlSecurityIT extends ESRestTestCase { @@ -47,6 +51,7 @@ public class EsqlSecurityIT extends ESRestTestCase { .user("user3", "x-pack-test-password", "user3", false) .user("user4", "x-pack-test-password", "user4", false) .user("user5", "x-pack-test-password", "user5", false) + .user("fls_user", "x-pack-test-password", "fls_user", false) .build(); @Override @@ -62,7 +67,11 @@ protected Settings restClientSettings() { private void indexDocument(String index, int id, double value, String org) throws IOException { Request indexDoc = new Request("PUT", index + "/_doc/" + id); - indexDoc.setJsonEntity("{\"value\":" + value + ",\"org\":\"" + org + "\"}"); + XContentBuilder builder = JsonXContent.contentBuilder().startObject(); + builder.field("value", value); + builder.field("org", org); + builder.field("partial", org + value); + indexDoc.setJsonEntity(Strings.toString(builder.endObject())); client().performRequest(indexDoc); } @@ -85,6 +94,11 @@ public void indexDocuments() throws IOException { indexDocument("index-user2", 1, 32.0, "marketing"); indexDocument("index-user2", 2, 
40.0, "sales"); refresh("index-user2"); + + createIndex("indexpartial", Settings.EMPTY, mapping); + indexDocument("indexpartial", 1, 32.0, "marketing"); + indexDocument("indexpartial", 2, 40.0, "sales"); + refresh("indexpartial"); } public void testAllowedIndices() throws Exception { @@ -122,7 +136,7 @@ public void testUnauthorizedIndices() throws IOException { assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400)); } - public void testDLS() throws Exception { + public void testDocumentLevelSecurity() throws Exception { Response resp = runESQLCommand("user3", "from index | stats sum=sum(value)"); assertOK(resp); Map respMap = entityAsMap(resp); @@ -130,6 +144,69 @@ public void testDLS() throws Exception { assertThat(respMap.get("values"), equalTo(List.of(List.of(10.0)))); } + public void testFieldLevelSecurityAllow() throws Exception { + Response resp = runESQLCommand("fls_user", "FROM index* | SORT value | LIMIT 1"); + assertOK(resp); + assertMap( + entityAsMap(resp), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "partial").entry("type", "text"), + matchesMap().entry("name", "value").entry("type", "double") + ) + ) + .entry("values", List.of(List.of("sales10.0", 10.0))) + ); + } + + public void testFieldLevelSecurityAllowPartial() throws Exception { + Request request = new Request("GET", "/index*/_field_caps"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "fls_user")); + request.addParameter("error_trace", "true"); + request.addParameter("pretty", "true"); + request.addParameter("fields", "*"); + + request = new Request("GET", "/index*/_search"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", "fls_user")); + request.addParameter("error_trace", "true"); + request.addParameter("pretty", "true"); + + Response resp = runESQLCommand("fls_user", "FROM index* | SORT partial | LIMIT 1"); + assertOK(resp); + assertMap( 
+ entityAsMap(resp), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "partial").entry("type", "text"), + matchesMap().entry("name", "value").entry("type", "double") + ) + ) + .entry("values", List.of(List.of("engineering20.0", 20.0))) + ); + } + + public void testFieldLevelSecuritySpellingMistake() throws Exception { + ResponseException e = expectThrows( + ResponseException.class, + () -> runESQLCommand("fls_user", "FROM index* | SORT parial | LIMIT 1") + ); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString("Unknown column [parial]")); + } + + public void testFieldLevelSecurityNotAllowed() throws Exception { + ResponseException e = expectThrows( + ResponseException.class, + () -> runESQLCommand("fls_user", "FROM index* | SORT org DESC | LIMIT 1") + ); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400)); + assertThat(EntityUtils.toString(e.getResponse().getEntity()), containsString("Unknown column [org]")); + } + public void testRowCommand() throws Exception { String user = randomFrom("test-admin", "user1", "user2"); Response resp = runESQLCommand(user, "row a = 5, b = 2 | stats count=sum(b) by a"); @@ -283,6 +360,7 @@ protected Response runESQLCommand(String user, String command) throws IOExceptio Request request = new Request("POST", "_query"); request.setJsonEntity(Strings.toString(json)); request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + request.addParameter("error_trace", "true"); return client().performRequest(request); } diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml index 7a89fa57f7102..7d134103afd28 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml +++ 
b/x-pack/plugin/esql/qa/security/src/javaRestTest/resources/roles.yml @@ -51,9 +51,22 @@ user4: - names: ['index-user1', 'index', "test-enrich" ] privileges: - read + user5: cluster: [] indices: - names: ['index-user1', 'index', "test-enrich" ] privileges: - read + +fls_user: + cluster: [] + indices: + - names: [ 'index' ] + privileges: [ 'read' ] + field_security: + grant: [ value, partial ] + - names: [ 'indexpartial' ] + privileges: [ 'read' ] + field_security: + grant: [ value ] diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 83be62beab4ec..591b20bd82f47 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -48,6 +49,7 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; +import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.InternalSettingsPlugin; @@ -68,6 +70,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; 
import java.util.Set; @@ -77,6 +80,8 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; @@ -137,6 +142,9 @@ protected String configUsers() { + "\n" + "user9:" + usersPasswHashed + + "\n" + + "user_different_fields:" + + usersPasswHashed + "\n"; } @@ -150,7 +158,8 @@ protected String configUsersRoles() { role5:user4,user7 role6:user5,user7 role7:user6 - role8:user9"""; + role8:user9 + role_different_fields:user_different_fields"""; } @Override @@ -213,6 +222,16 @@ protected String configRoles() { privileges: [ ALL ] field_security: grant: [ 'field*', 'query' ] + role_different_fields: + indices: + - names: [ 'partial1*' ] + privileges: [ 'read' ] + field_security: + grant: [ value, partial ] + - names: [ 'partial2*' ] + privileges: [ 'read' ] + field_security: + grant: [ value ] """; } @@ -2336,4 +2355,49 @@ public void testLookupRuntimeFields() throws Exception { ); } + public void testSearchDifferentFieldsVisible() { + String firstName = "partial1" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + String secondName = "partial2" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + indexPartial(firstName, secondName); + SearchResponse response = client().filterWithHeader( + Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_different_fields", USERS_PASSWD)) + ).prepareSearch("partial*").addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)).get(); + try { + 
assertMap(response.getHits().getAt(0).getSourceAsMap(), matchesMap().entry("value", 1).entry("partial", 2)); + assertMap(response.getHits().getAt(1).getSourceAsMap(), matchesMap().entry("value", 2)); + } finally { + response.decRef(); + } + } + + /** + * The fields {@code partial} is only visible in one of the two backing indices and field caps should show it. + */ + public void testFieldCapsDifferentFieldsVisible() { + String firstName = "partial1_" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + String secondName = "partial2_" + randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + indexPartial(firstName, secondName); + FieldCapabilitiesResponse response = client().filterWithHeader( + Map.of(BASIC_AUTH_HEADER, basicAuthHeaderValue("user_different_fields", USERS_PASSWD)) + ).prepareFieldCaps("partial*").setFields("value", "partial").get(); + try { + assertThat(response.get().keySet(), equalTo(Set.of("value", "partial"))); + assertThat(response.getField("value").keySet(), equalTo(Set.of("long"))); + assertThat(response.getField("partial").keySet(), equalTo(Set.of("long"))); + } finally { + response.decRef(); + } + } + + private void indexPartial(String firstName, String secondName) { + BulkResponse bulkResponse = client().prepareBulk() + .add(client().prepareIndex(firstName).setSource("value", 1, "partial", 2)) + .add(client().prepareIndex(secondName).setSource("value", 2, "partial", 3)) + .setRefreshPolicy(IMMEDIATE) + .get(); + for (var i : bulkResponse.getItems()) { + assertThat(i.getFailure(), nullValue()); + assertThat(i.status(), equalTo(RestStatus.CREATED)); + } + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index ae6df838b4eac..5736d3e550f01 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -74,6 +74,7 @@ import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.NetworkPlugin; @@ -1947,29 +1948,29 @@ public UnaryOperator> getIndexTemplateMetadat } @Override - public Function> getFieldFilter() { + public Function getFieldFilter() { if (enabled) { return index -> { XPackLicenseState licenseState = getLicenseState(); IndicesAccessControl indicesAccessControl = threadContext.get() .getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); if (indicesAccessControl == null) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } assert indicesAccessControl.isGranted(); IndicesAccessControl.IndexAccessControl indexPermissions = indicesAccessControl.getIndexPermissions(index); if (indexPermissions == null) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } FieldPermissions fieldPermissions = indexPermissions.getFieldPermissions(); if (fieldPermissions.hasFieldLevelSecurity() == false) { - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } if (FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState) == false) { // check license last, once we know FLS is actually used - return MapperPlugin.NOOP_FIELD_PREDICATE; + return FieldPredicate.ACCEPT_ALL; } - return fieldPermissions::grantsAccessTo; + return fieldPermissions.fieldPredicate(); }; } return MapperPlugin.super.getFieldFilter(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 
6a869377d7b07..f575bb6adc50e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -52,6 +52,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.license.internal.XPackLicenseStatus; import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.internal.RestExtension; import org.elasticsearch.rest.RestHandler; @@ -120,7 +121,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -469,7 +469,7 @@ public void testJoinValidatorForFIPSOnForbiddenLicense() throws Exception { public void testGetFieldFilterSecurityEnabled() throws Exception { createComponents(Settings.EMPTY); - Function> fieldFilter = security.getFieldFilter(); + Function fieldFilter = security.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); Map permissionsMap = new HashMap<>(); @@ -491,9 +491,9 @@ public void testGetFieldFilterSecurityEnabled() throws Exception { assertThat(fieldFilter.apply("index_granted"), trueWith("field_granted")); assertThat(fieldFilter.apply("index_granted"), falseWith(randomAlphaOfLengthBetween(3, 10))); - assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_granted_all_permissions")); + assertEquals(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("index_granted_all_permissions")); assertThat(fieldFilter.apply("index_granted_all_permissions"), trueWith(randomAlphaOfLengthBetween(3, 10))); - assertEquals(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("index_other")); + assertEquals(FieldPredicate.ACCEPT_ALL, fieldFilter.apply("index_other")); } 
public void testGetFieldFilterSecurityDisabled() throws Exception { @@ -503,7 +503,7 @@ public void testGetFieldFilterSecurityDisabled() throws Exception { public void testGetFieldFilterSecurityEnabledLicenseNoFLS() throws Exception { createComponents(Settings.EMPTY); - Function> fieldFilter = security.getFieldFilter(); + Function fieldFilter = security.getFieldFilter(); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); licenseState.update( new XPackLicenseStatus( @@ -513,7 +513,7 @@ public void testGetFieldFilterSecurityEnabledLicenseNoFLS() throws Exception { ) ); assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); - assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6))); + assertSame(FieldPredicate.ACCEPT_ALL, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6))); } public void testValidateRealmsWhenSettingsAreInvalid() { From d09dce277eb271c68d1afedd76f44e498dc04ba7 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 26 Mar 2024 17:20:42 +0100 Subject: [PATCH 181/214] [Inference API] Move organization constant to OpenAiServiceFields and use constants. 
(#106772) --- .../services/openai/OpenAiServiceFields.java | 2 ++ .../OpenAiChatCompletionServiceSettings.java | 3 +-- .../OpenAiChatCompletionTaskSettings.java | 3 +-- .../OpenAiEmbeddingsRequestTaskSettings.java | 2 +- .../OpenAiEmbeddingsServiceSettings.java | 2 +- .../OpenAiEmbeddingsTaskSettings.java | 2 +- ...ChatCompletionRequestTaskSettingsTests.java | 7 +++---- ...enAiChatCompletionServiceSettingsTests.java | 9 +++++---- .../OpenAiChatCompletionTaskSettingsTests.java | 13 ++++++------- ...enAiEmbeddingsRequestTaskSettingsTests.java | 5 +++-- .../OpenAiEmbeddingsServiceSettingsTests.java | 17 +++++++++-------- .../OpenAiEmbeddingsTaskSettingsTests.java | 18 +++++++----------- 12 files changed, 40 insertions(+), 43 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java index 1e2353f901705..bafe1b031b028 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceFields.java @@ -11,4 +11,6 @@ public class OpenAiServiceFields { public static final String USER = "user"; + public static final String ORGANIZATION = "organization_id"; + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java index 0150d75b7037e..16b0ed5d47039 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettings.java @@ -31,6 +31,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; /** * Defines the service settings for interacting with OpenAI's chat completion models. @@ -39,8 +40,6 @@ public class OpenAiChatCompletionServiceSettings implements ServiceSettings { public static final String NAME = "openai_completion_service_settings"; - static final String ORGANIZATION = "organization_id"; - public static OpenAiChatCompletionServiceSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java index fb10d959087de..2d5a407f3c1a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettings.java @@ -22,13 +22,12 @@ import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; public class OpenAiChatCompletionTaskSettings implements TaskSettings { public static final String NAME = "openai_completion_task_settings"; - public static final String 
USER = "user"; - public static OpenAiChatCompletionTaskSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 5bdb0d7542a83..373704af37fcd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -16,7 +16,7 @@ import java.util.Map; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.USER; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; /** * This class handles extracting OpenAI task settings from a request. 
The difference between this class and diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 34713ff2b7208..01aa4f51799fb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -37,6 +37,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.ORGANIZATION; /** * Defines the service settings for interacting with OpenAI's text embedding models. 
@@ -45,7 +46,6 @@ public class OpenAiEmbeddingsServiceSettings implements ServiceSettings { public static final String NAME = "openai_service_settings"; - static final String ORGANIZATION = "organization_id"; static final String DIMENSIONS_SET_BY_USER = "dimensions_set_by_user"; public static OpenAiEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index a7b11487ca72f..e306f2d3d2928 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -23,6 +23,7 @@ import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields.USER; /** * Defines the task settings for the openai service. 
@@ -33,7 +34,6 @@ public class OpenAiEmbeddingsTaskSettings implements TaskSettings { public static final String NAME = "openai_embeddings_task_settings"; - public static final String USER = "user"; public static OpenAiEmbeddingsTaskSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java index 24632e120f94b..6fbdd3bf622d3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionRequestTaskSettingsTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.util.HashMap; import java.util.Map; @@ -28,9 +29,7 @@ public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() } public void testFromMap_ReturnsUser() { - var settings = OpenAiChatCompletionRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user")) - ); + var settings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } @@ -38,7 +37,7 @@ public static Map getChatCompletionRequestTaskSettingsMap(@Nulla var map = new HashMap(); if (user != null) { - map.put(OpenAiChatCompletionTaskSettings.USER, user); + map.put(OpenAiServiceFields.USER, user); } return map; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index 8778b2f13e746..ba2460f7bc09a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.io.IOException; import java.net.URI; @@ -40,7 +41,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, ServiceFields.URL, url, - OpenAiChatCompletionServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens @@ -61,7 +62,7 @@ public void testFromMap_MissingUrl_DoesNotThrowException() { Map.of( ServiceFields.MODEL_ID, modelId, - OpenAiChatCompletionServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, organization, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens @@ -109,7 +110,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiChatCompletionServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiChatCompletionServiceSettings.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")) + new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "model")) ) ); @@ -118,7 +119,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { containsString( 
org.elasticsearch.common.Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", - OpenAiChatCompletionServiceSettings.ORGANIZATION + OpenAiServiceFields.ORGANIZATION ) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java index 66a9ec371eb93..f2bd26a4e6432 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.io.IOException; import java.util.HashMap; @@ -27,14 +28,14 @@ public static OpenAiChatCompletionTaskSettings createRandomWithUser() { public void testFromMap_WithUser() { assertEquals( new OpenAiChatCompletionTaskSettings("user"), - OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))) + OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))) ); } public void testFromMap_UserIsEmptyString() { var thrownException = expectThrows( ValidationException.class, - () -> OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, ""))) + () -> OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, ""))) ); assertThat( @@ -49,7 +50,7 @@ public void 
testFromMap_MissingUser_DoesNotThrowException() { } public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { - var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))); + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of( taskSettings, @@ -59,11 +60,9 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { } public void testOverrideWith_UsesOverriddenSettings() { - var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user"))); + var taskSettings = OpenAiChatCompletionTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); - var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiChatCompletionTaskSettings.USER, "user2")) - ); + var requestTaskSettings = OpenAiChatCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user2"))); var overriddenTaskSettings = OpenAiChatCompletionTaskSettings.of(taskSettings, requestTaskSettings); assertThat(overriddenTaskSettings, is(new OpenAiChatCompletionTaskSettings("user2"))); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index 5a39fcb61ff0a..c95853e2d0128 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -9,6 +9,7 
@@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import java.util.HashMap; import java.util.Map; @@ -27,7 +28,7 @@ public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() } public void testFromMap_ReturnsUser() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user"))); + var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user"))); assertThat(settings.user(), is("user")); } @@ -35,7 +36,7 @@ public static Map getRequestTaskSettingsMap(@Nullable String use var map = new HashMap(); if (user != null) { - map.put(OpenAiEmbeddingsTaskSettings.USER, user); + map.put(OpenAiServiceFields.USER, user); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index 00cea6dc6ed21..e37318a0c96d4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.hamcrest.CoreMatchers; import java.io.IOException; @@ -79,7 +80,7 @@ public void testFromMap_Request_CreatesSettingsCorrectly() { modelId, ServiceFields.URL, url, - 
OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -121,7 +122,7 @@ public void testFromMap_Request_DimensionsSetByUser_IsFalse_WhenDimensionsAreNot modelId, ServiceFields.URL, url, - OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -162,7 +163,7 @@ public void testFromMap_Persistent_CreatesSettingsCorrectly() { modelId, ServiceFields.URL, url, - OpenAiEmbeddingsServiceSettings.ORGANIZATION, + OpenAiServiceFields.ORGANIZATION, org, ServiceFields.SIMILARITY, similarity, @@ -219,7 +220,7 @@ public void testFromMap_PersistentContext_ThrowsException_WhenDimensionsSetByUse public void testFromMap_MissingUrl_DoesNotThrowException() { var serviceSettings = OpenAiEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(ServiceFields.MODEL_ID, "m", OpenAiEmbeddingsServiceSettings.ORGANIZATION, "org")), + new HashMap<>(Map.of(ServiceFields.MODEL_ID, "m", OpenAiServiceFields.ORGANIZATION, "org")), ConfigurationParseContext.REQUEST ); assertNull(serviceSettings.uri()); @@ -260,7 +261,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsServiceSettings.ORGANIZATION, "", ServiceFields.MODEL_ID, "m")), + new HashMap<>(Map.of(OpenAiServiceFields.ORGANIZATION, "", ServiceFields.MODEL_ID, "m")), ConfigurationParseContext.REQUEST ) ); @@ -270,7 +271,7 @@ public void testFromMap_EmptyOrganization_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", - OpenAiEmbeddingsServiceSettings.ORGANIZATION + OpenAiServiceFields.ORGANIZATION ) ) ); @@ -375,7 +376,7 @@ public static Map getServiceSettingsMap(String modelId, @Nullabl } if (org != null) { - map.put(OpenAiEmbeddingsServiceSettings.ORGANIZATION, org); + map.put(OpenAiServiceFields.ORGANIZATION, org); } return map; } @@ -395,7 +396,7 @@ public static Map getServiceSettingsMap( } if (org != null) { - map.put(OpenAiEmbeddingsServiceSettings.ORGANIZATION, org); + map.put(OpenAiServiceFields.ORGANIZATION, org); } if (dimensions != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index 6448b66d11cf3..c5a510ef9de0c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; +import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceFields; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -38,10 +39,7 @@ public static OpenAiEmbeddingsTaskSettings createRandom() { public void testFromMap_WithUser() { assertEquals( new OpenAiEmbeddingsTaskSettings("user"), - OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), - ConfigurationParseContext.REQUEST - ) + OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.REQUEST) ); } @@ 
-49,7 +47,7 @@ public void testFromMap_UserIsEmptyString() { var thrownException = expectThrows( ValidationException.class, () -> OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "")), ConfigurationParseContext.REQUEST ) ); @@ -67,7 +65,7 @@ public void testFromMap_MissingUser_DoesNotThrowException() { public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.PERSISTENT ); @@ -77,13 +75,11 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { public void testOverrideWith_UsesOverriddenSettings() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), + new HashMap<>(Map.of(OpenAiServiceFields.USER, "user")), ConfigurationParseContext.PERSISTENT ); - var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user2")) - ); + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceFields.USER, "user2"))); var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("user2"))); @@ -108,7 +104,7 @@ public static Map getTaskSettingsMap(@Nullable String user) { var map = new HashMap(); if (user != null) { - map.put(OpenAiEmbeddingsTaskSettings.USER, user); + map.put(OpenAiServiceFields.USER, user); } return map; From 59354e35e1606b2fdd25c42077923382de5b2510 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 26 Mar 2024 17:39:23 +0100 Subject: [PATCH 182/214] [Inference API] Add XContentUtilsTests and java docs to 
XContentUtils (#106770) --- .../external/response/XContentUtils.java | 7 ++ .../external/response/XContentUtilsTests.java | 86 +++++++++++++++++++ 2 files changed, 93 insertions(+) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 4f4091873fba9..3511cbda1841b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -15,6 +15,13 @@ public class XContentUtils { + /** + * Moves to the first valid token, which is non-null. + * Does not move, if the parser is already positioned at a valid token. + * + * @param parser parser to move + * @throws IOException if underlying parser methods throw + */ public static void moveToFirstToken(XContentParser parser) throws IOException { if (parser.currentToken() == null) { parser.nextToken(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java new file mode 100644 index 0000000000000..c8de0371ab196 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Locale; + +public class XContentUtilsTests extends ESTestCase { + + public void testMoveToFirstToken() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertNull(parser.currentToken()); + + XContentUtils.moveToFirstToken(parser); + + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + } + } + + public void testMoveToFirstToken_DoesNotMoveIfAlreadyAtAToken() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + // position at a valid token + parser.nextToken(); + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + + XContentUtils.moveToFirstToken(parser); + + // still at the beginning of the object + assertEquals(XContentParser.Token.START_OBJECT, parser.currentToken()); + } + } + + public void testPositionParserAtTokenAfterField() throws IOException { + var json = """ + { + "key": "value" + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "key", "some error"); + + assertEquals("value", parser.text()); + } + } + + public void testPositionParserAtTokenAfterField_ThrowsIfFieldIsMissing() throws IOException { + var json = """ + { + "key": "value" + } + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + var exception = expectThrows( + IllegalStateException.class, + () -> XContentUtils.positionParserAtTokenAfterField(parser, missingField, errorFormat) + 
); + + assertEquals(String.format(Locale.ROOT, errorFormat, missingField), exception.getMessage()); + } + } +} From cdb2e586403f0c2d84ad7931f49cb78f999ae3e8 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 26 Mar 2024 09:45:19 -0700 Subject: [PATCH 183/214] Track memory in rate aggregation function (#106730) We should track the memory usage of the individual state in the rate aggregation function. Relates #106703 --- .../gen/GroupingAggregatorImplementer.java | 2 +- .../aggregation/RateDoubleAggregator.java | 37 ++++++++++++++----- .../aggregation/RateIntAggregator.java | 37 ++++++++++++++----- .../aggregation/RateLongAggregator.java | 37 ++++++++++++++----- .../RateDoubleGroupingAggregatorFunction.java | 2 +- .../RateIntGroupingAggregatorFunction.java | 2 +- .../RateLongGroupingAggregatorFunction.java | 2 +- .../aggregation/X-RateAggregator.java.st | 37 ++++++++++++++----- 8 files changed, 112 insertions(+), 44 deletions(-) diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index 1be01f445691d..cb65d2337d588 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -100,7 +100,7 @@ public GroupingAggregatorImplementer( this.createParameters = init.getParameters() .stream() .map(Parameter::from) - .filter(f -> false == f.type().equals(BIG_ARRAYS)) + .filter(f -> false == f.type().equals(BIG_ARRAYS) && false == f.type().equals(DRIVER_CONTEXT)) .collect(Collectors.toList()); this.implementation = ClassName.get( diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index 016bf9387ca4b..a560eee4555e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.ann.GroupingAggregator; @@ -35,9 +36,9 @@ @IntermediateState(name = "resets", type = "DOUBLE") } ) public class RateDoubleAggregator { - public static DoubleRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { - // TODO: pass BlockFactory instead bigArrays so we can use the breaker - return new DoubleRateGroupingState(bigArrays, unitInMillis); + + public static DoubleRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new DoubleRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); } public static void combine(DoubleRateGroupingState current, int groupId, long timestamp, double value) { @@ -68,7 +69,7 @@ public static Block evaluateFinal(DoubleRateGroupingState state, IntVector selec return state.evaluateFinal(selected, driverContext.blockFactory()); } - private static class DoubleRateState implements Accountable { + private static class DoubleRateState { static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(DoubleRateState.class); final long[] timestamps; // descending order final double[] values; @@ -101,9 +102,10 @@ int entries() { return timestamps.length; } - @Override - public long ramBytesUsed() { - return BASE_RAM_USAGE; + static long bytesUsed(int entries) { + var ts = 
RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Double.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; } } @@ -111,9 +113,12 @@ public static final class DoubleRateGroupingState implements Releasable, Account private ObjectArray states; private final long unitInMillis; private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states - DoubleRateGroupingState(BigArrays bigArrays, long unitInMillis) { + DoubleRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { this.bigArrays = bigArrays; + this.breaker = breaker; this.states = bigArrays.newObjectArray(1); this.unitInMillis = unitInMillis; } @@ -122,16 +127,25 @@ void ensureCapacity(int groupId) { states = bigArrays.grow(states, groupId + 1); } + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + void append(int groupId, long timestamp, double value) { ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker(DoubleRateState.bytesUsed(1)); state = new DoubleRateState(new long[] { timestamp }, new double[] { value }); states.set(groupId, state); } else { if (state.entries() == 1) { + adjustBreaker(DoubleRateState.bytesUsed(2)); state = new DoubleRateState(new long[] { state.timestamps[0], timestamp }, new double[] { state.values[0], value }); states.set(groupId, state); + adjustBreaker(-DoubleRateState.bytesUsed(1)); // old state } else { state.append(timestamp, value); } @@ -147,6 +161,7 @@ void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker(DoubleRateState.bytesUsed(valueCount)); state = new 
DoubleRateState(valueCount); states.set(groupId, state); // TODO: add bulk_copy to Block @@ -155,9 +170,11 @@ void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset state.values[i] = values.getDouble(firstIndex + i); } } else { + adjustBreaker(DoubleRateState.bytesUsed(state.entries() + valueCount)); var newState = new DoubleRateState(state.entries() + valueCount); states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-DoubleRateState.bytesUsed(state.entries())); // old state } state.reset += reset; } @@ -193,12 +210,12 @@ void merge(DoubleRateState curr, DoubleRateState dst, int firstIndex, int rightC @Override public long ramBytesUsed() { - return states.ramBytesUsed(); + return states.ramBytesUsed() + stateBytes; } @Override public void close() { - Releasables.close(states); + Releasables.close(states, () -> adjustBreaker(-stateBytes)); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index fbf43f7d72c46..8a536a42a2dbe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.ann.GroupingAggregator; @@ -36,9 +37,9 @@ @IntermediateState(name = "resets", type = "DOUBLE") } ) public class RateIntAggregator { - public static IntRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { - // TODO: pass 
BlockFactory instead bigArrays so we can use the breaker - return new IntRateGroupingState(bigArrays, unitInMillis); + + public static IntRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new IntRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); } public static void combine(IntRateGroupingState current, int groupId, long timestamp, int value) { @@ -69,7 +70,7 @@ public static Block evaluateFinal(IntRateGroupingState state, IntVector selected return state.evaluateFinal(selected, driverContext.blockFactory()); } - private static class IntRateState implements Accountable { + private static class IntRateState { static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(IntRateState.class); final long[] timestamps; // descending order final int[] values; @@ -102,9 +103,10 @@ int entries() { return timestamps.length; } - @Override - public long ramBytesUsed() { - return BASE_RAM_USAGE; + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; } } @@ -112,9 +114,12 @@ public static final class IntRateGroupingState implements Releasable, Accountabl private ObjectArray states; private final long unitInMillis; private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states - IntRateGroupingState(BigArrays bigArrays, long unitInMillis) { + IntRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { this.bigArrays = bigArrays; + this.breaker = breaker; this.states = bigArrays.newObjectArray(1); this.unitInMillis = unitInMillis; } @@ -123,16 +128,25 @@ void ensureCapacity(int groupId) { states = bigArrays.grow(states, groupId + 1); } + void adjustBreaker(long 
bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + void append(int groupId, long timestamp, int value) { ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker(IntRateState.bytesUsed(1)); state = new IntRateState(new long[] { timestamp }, new int[] { value }); states.set(groupId, state); } else { if (state.entries() == 1) { + adjustBreaker(IntRateState.bytesUsed(2)); state = new IntRateState(new long[] { state.timestamps[0], timestamp }, new int[] { state.values[0], value }); states.set(groupId, state); + adjustBreaker(-IntRateState.bytesUsed(1)); // old state } else { state.append(timestamp, value); } @@ -148,6 +162,7 @@ void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, i ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker(IntRateState.bytesUsed(valueCount)); state = new IntRateState(valueCount); states.set(groupId, state); // TODO: add bulk_copy to Block @@ -156,9 +171,11 @@ void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, i state.values[i] = values.getInt(firstIndex + i); } } else { + adjustBreaker(IntRateState.bytesUsed(state.entries() + valueCount)); var newState = new IntRateState(state.entries() + valueCount); states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-IntRateState.bytesUsed(state.entries())); // old state } state.reset += reset; } @@ -194,12 +211,12 @@ void merge(IntRateState curr, IntRateState dst, int firstIndex, int rightCount, @Override public long ramBytesUsed() { - return states.ramBytesUsed(); + return states.ramBytesUsed() + stateBytes; } @Override public void close() { - Releasables.close(states); + Releasables.close(states, () -> adjustBreaker(-stateBytes)); } @Override diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index b5d0dfc8aabdb..eed95ab602db8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.ann.GroupingAggregator; @@ -35,9 +36,9 @@ @IntermediateState(name = "resets", type = "DOUBLE") } ) public class RateLongAggregator { - public static LongRateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { - // TODO: pass BlockFactory instead bigArrays so we can use the breaker - return new LongRateGroupingState(bigArrays, unitInMillis); + + public static LongRateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new LongRateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); } public static void combine(LongRateGroupingState current, int groupId, long timestamp, long value) { @@ -68,7 +69,7 @@ public static Block evaluateFinal(LongRateGroupingState state, IntVector selecte return state.evaluateFinal(selected, driverContext.blockFactory()); } - private static class LongRateState implements Accountable { + private static class LongRateState { static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject(LongRateState.class); final long[] timestamps; // descending order final long[] values; @@ -101,9 +102,10 @@ int entries() { return timestamps.length; } - @Override - public long ramBytesUsed() 
{ - return BASE_RAM_USAGE; + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + return BASE_RAM_USAGE + ts + vs; } } @@ -111,9 +113,12 @@ public static final class LongRateGroupingState implements Releasable, Accountab private ObjectArray states; private final long unitInMillis; private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states - LongRateGroupingState(BigArrays bigArrays, long unitInMillis) { + LongRateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { this.bigArrays = bigArrays; + this.breaker = breaker; this.states = bigArrays.newObjectArray(1); this.unitInMillis = unitInMillis; } @@ -122,16 +127,25 @@ void ensureCapacity(int groupId) { states = bigArrays.grow(states, groupId + 1); } + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + void append(int groupId, long timestamp, long value) { ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker(LongRateState.bytesUsed(1)); state = new LongRateState(new long[] { timestamp }, new long[] { value }); states.set(groupId, state); } else { if (state.entries() == 1) { + adjustBreaker(LongRateState.bytesUsed(2)); state = new LongRateState(new long[] { state.timestamps[0], timestamp }, new long[] { state.values[0], value }); states.set(groupId, state); + adjustBreaker(-LongRateState.bytesUsed(1)); // old state } else { state.append(timestamp, value); } @@ -147,6 +161,7 @@ void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + 
adjustBreaker(LongRateState.bytesUsed(valueCount)); state = new LongRateState(valueCount); states.set(groupId, state); // TODO: add bulk_copy to Block @@ -155,9 +170,11 @@ void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, state.values[i] = values.getLong(firstIndex + i); } } else { + adjustBreaker(LongRateState.bytesUsed(state.entries() + valueCount)); var newState = new LongRateState(state.entries() + valueCount); states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-LongRateState.bytesUsed(state.entries())); // old state } state.reset += reset; } @@ -193,12 +210,12 @@ void merge(LongRateState curr, LongRateState dst, int firstIndex, int rightCount @Override public long ramBytesUsed() { - return states.ramBytesUsed(); + return states.ramBytesUsed() + stateBytes; } @Override public void close() { - Releasables.close(states); + Releasables.close(states, () -> adjustBreaker(-stateBytes)); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java index 608221614c483..8d9e011891e95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateDoubleGroupingAggregatorFunction.java @@ -49,7 +49,7 @@ public RateDoubleGroupingAggregatorFunction(List channels, public static RateDoubleGroupingAggregatorFunction create(List channels, DriverContext driverContext, long unitInMillis) { - return new RateDoubleGroupingAggregatorFunction(channels, RateDoubleAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis); + return new 
RateDoubleGroupingAggregatorFunction(channels, RateDoubleAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); } public static List intermediateStateDesc() { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java index df954d92a6d2a..6bd4b833dc9e6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateIntGroupingAggregatorFunction.java @@ -49,7 +49,7 @@ public RateIntGroupingAggregatorFunction(List channels, public static RateIntGroupingAggregatorFunction create(List channels, DriverContext driverContext, long unitInMillis) { - return new RateIntGroupingAggregatorFunction(channels, RateIntAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis); + return new RateIntGroupingAggregatorFunction(channels, RateIntAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); } public static List intermediateStateDesc() { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java index fb536465ed973..27318d6496737 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/RateLongGroupingAggregatorFunction.java @@ -49,7 +49,7 @@ public RateLongGroupingAggregatorFunction(List channels, public static RateLongGroupingAggregatorFunction create(List 
channels, DriverContext driverContext, long unitInMillis) { - return new RateLongGroupingAggregatorFunction(channels, RateLongAggregator.initGrouping(driverContext.bigArrays(), unitInMillis), driverContext, unitInMillis); + return new RateLongGroupingAggregatorFunction(channels, RateLongAggregator.initGrouping(driverContext, unitInMillis), driverContext, unitInMillis); } public static List intermediateStateDesc() { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index 9ace663fec990..86f5e058bd19c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -9,6 +9,7 @@ package org.elasticsearch.compute.aggregation; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.compute.ann.GroupingAggregator; @@ -38,9 +39,9 @@ import org.elasticsearch.core.Releasables; @IntermediateState(name = "resets", type = "DOUBLE") } ) public class Rate$Type$Aggregator { - public static $Type$RateGroupingState initGrouping(BigArrays bigArrays, long unitInMillis) { - // TODO: pass BlockFactory instead bigArrays so we can use the breaker - return new $Type$RateGroupingState(bigArrays, unitInMillis); + + public static $Type$RateGroupingState initGrouping(DriverContext driverContext, long unitInMillis) { + return new $Type$RateGroupingState(driverContext.bigArrays(), driverContext.breaker(), unitInMillis); } public static void combine($Type$RateGroupingState current, int groupId, long timestamp, $type$ value) { @@ -71,7 +72,7 @@ public 
class Rate$Type$Aggregator { return state.evaluateFinal(selected, driverContext.blockFactory()); } - private static class $Type$RateState implements Accountable { + private static class $Type$RateState { static final long BASE_RAM_USAGE = RamUsageEstimator.sizeOfObject($Type$RateState.class); final long[] timestamps; // descending order final $type$[] values; @@ -104,9 +105,10 @@ public class Rate$Type$Aggregator { return timestamps.length; } - @Override - public long ramBytesUsed() { - return BASE_RAM_USAGE; + static long bytesUsed(int entries) { + var ts = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Long.BYTES * entries); + var vs = RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) $BYTES$ * entries); + return BASE_RAM_USAGE + ts + vs; } } @@ -114,9 +116,12 @@ public class Rate$Type$Aggregator { private ObjectArray<$Type$RateState> states; private final long unitInMillis; private final BigArrays bigArrays; + private final CircuitBreaker breaker; + private long stateBytes; // for individual states - $Type$RateGroupingState(BigArrays bigArrays, long unitInMillis) { + $Type$RateGroupingState(BigArrays bigArrays, CircuitBreaker breaker, long unitInMillis) { this.bigArrays = bigArrays; + this.breaker = breaker; this.states = bigArrays.newObjectArray(1); this.unitInMillis = unitInMillis; } @@ -125,16 +130,25 @@ public class Rate$Type$Aggregator { states = bigArrays.grow(states, groupId + 1); } + void adjustBreaker(long bytes) { + breaker.addEstimateBytesAndMaybeBreak(bytes, "<>"); + stateBytes += bytes; + assert stateBytes >= 0 : stateBytes; + } + void append(int groupId, long timestamp, $type$ value) { ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker($Type$RateState.bytesUsed(1)); state = new $Type$RateState(new long[] { timestamp }, new $type$[] { value }); states.set(groupId, state); } else { if (state.entries() == 1) { + 
adjustBreaker($Type$RateState.bytesUsed(2)); state = new $Type$RateState(new long[] { state.timestamps[0], timestamp }, new $type$[] { state.values[0], value }); states.set(groupId, state); + adjustBreaker(-$Type$RateState.bytesUsed(1)); // old state } else { state.append(timestamp, value); } @@ -150,6 +164,7 @@ public class Rate$Type$Aggregator { ensureCapacity(groupId); var state = states.get(groupId); if (state == null) { + adjustBreaker($Type$RateState.bytesUsed(valueCount)); state = new $Type$RateState(valueCount); states.set(groupId, state); // TODO: add bulk_copy to Block @@ -158,9 +173,11 @@ public class Rate$Type$Aggregator { state.values[i] = values.get$Type$(firstIndex + i); } } else { + adjustBreaker($Type$RateState.bytesUsed(state.entries() + valueCount)); var newState = new $Type$RateState(state.entries() + valueCount); states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); + adjustBreaker(-$Type$RateState.bytesUsed(state.entries())); // old state } state.reset += reset; } @@ -196,12 +213,12 @@ public class Rate$Type$Aggregator { @Override public long ramBytesUsed() { - return states.ramBytesUsed(); + return states.ramBytesUsed() + stateBytes; } @Override public void close() { - Releasables.close(states); + Releasables.close(states, () -> adjustBreaker(-stateBytes)); } @Override From 69bf2be9f3251a5896e8c57f16cc3931cce93d72 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Mar 2024 13:03:24 -0400 Subject: [PATCH 184/214] Remove 8.12 from branches.json --- branches.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/branches.json b/branches.json index dc72956c13f80..772693505b9e0 100644 --- a/branches.json +++ b/branches.json @@ -7,9 +7,6 @@ { "branch": "8.13" }, - { - "branch": "8.12" - }, { "branch": "7.17" } From 0b3382cd240e0afd01d27a8740b57c3d2771a697 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 26 Mar 2024 11:00:38 
-0700 Subject: [PATCH 185/214] Support ordinals grouping for rate aggregation (#106735) Add support for ordinal grouping in the rate aggregation function. Relates #106703 --- .../aggregation/RateDoubleAggregator.java | 54 ++++++++++++- .../aggregation/RateIntAggregator.java | 54 ++++++++++++- .../aggregation/RateLongAggregator.java | 54 ++++++++++++- .../aggregation/X-RateAggregator.java.st | 54 ++++++++++++- ...TimeSeriesSortedSourceOperatorFactory.java | 6 +- .../TimeSeriesSortedSourceOperatorTests.java | 80 +++++++++++++------ 6 files changed, 258 insertions(+), 44 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java index a560eee4555e2..2dc5b441ca00d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateDoubleAggregator.java @@ -24,6 +24,8 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import java.util.Arrays; + /** * A rate grouping aggregation definition for double. * This class is generated. Edit `X-RateAggregator.java.st` instead. 
@@ -59,10 +61,10 @@ public static void combineIntermediate( public static void combineStates( DoubleRateGroupingState current, int currentGroupId, // make the stylecheck happy - DoubleRateGroupingState state, - int statePosition + DoubleRateGroupingState otherState, + int otherGroupId ) { - throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + current.combineState(currentGroupId, otherState, otherGroupId); } public static Block evaluateFinal(DoubleRateGroupingState state, IntVector selected, DriverContext driverContext) { @@ -163,6 +165,7 @@ void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset if (state == null) { adjustBreaker(DoubleRateState.bytesUsed(valueCount)); state = new DoubleRateState(valueCount); + state.reset = reset; states.set(groupId, state); // TODO: add bulk_copy to Block for (int i = 0; i < valueCount; i++) { @@ -172,11 +175,11 @@ void combine(int groupId, LongBlock timestamps, DoubleBlock values, double reset } else { adjustBreaker(DoubleRateState.bytesUsed(state.entries() + valueCount)); var newState = new DoubleRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); adjustBreaker(-DoubleRateState.bytesUsed(state.entries())); // old state } - state.reset += reset; } void merge(DoubleRateState curr, DoubleRateState dst, int firstIndex, int rightCount, LongBlock timestamps, DoubleBlock values) { @@ -208,6 +211,49 @@ void merge(DoubleRateState curr, DoubleRateState dst, int firstIndex, int rightC } } + void combineState(int groupId, DoubleRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? 
otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(DoubleRateState.bytesUsed(len)); + curr = new DoubleRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + DoubleRateState mergeState(DoubleRateState s1, DoubleRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(DoubleRateState.bytesUsed(newLen)); + var dst = new DoubleRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + @Override public long ramBytesUsed() { return states.ramBytesUsed() + stateBytes; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java index 8a536a42a2dbe..1ba8b9264c24a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateIntAggregator.java @@ -25,6 +25,8 @@ import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Releasables; +import java.util.Arrays; + /** * A rate grouping aggregation definition for int. * This class is generated. Edit `X-RateAggregator.java.st` instead. @@ -60,10 +62,10 @@ public static void combineIntermediate( public static void combineStates( IntRateGroupingState current, int currentGroupId, // make the stylecheck happy - IntRateGroupingState state, - int statePosition + IntRateGroupingState otherState, + int otherGroupId ) { - throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + current.combineState(currentGroupId, otherState, otherGroupId); } public static Block evaluateFinal(IntRateGroupingState state, IntVector selected, DriverContext driverContext) { @@ -164,6 +166,7 @@ void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, i if (state == null) { adjustBreaker(IntRateState.bytesUsed(valueCount)); state = new IntRateState(valueCount); + state.reset = reset; states.set(groupId, state); // TODO: add bulk_copy to Block for (int i = 0; i < valueCount; i++) { @@ -173,11 +176,11 @@ void combine(int groupId, LongBlock timestamps, IntBlock values, double reset, i } else { adjustBreaker(IntRateState.bytesUsed(state.entries() + valueCount)); var newState = new IntRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); adjustBreaker(-IntRateState.bytesUsed(state.entries())); // old state } - state.reset += reset; } void merge(IntRateState curr, IntRateState dst, int firstIndex, int rightCount, LongBlock timestamps, IntBlock values) { @@ -209,6 +212,49 @@ void merge(IntRateState curr, IntRateState dst, int firstIndex, int rightCount, } } + void combineState(int groupId, IntRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? 
otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(IntRateState.bytesUsed(len)); + curr = new IntRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + IntRateState mergeState(IntRateState s1, IntRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(IntRateState.bytesUsed(newLen)); + var dst = new IntRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + @Override public long ramBytesUsed() { return states.ramBytesUsed() + stateBytes; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java index eed95ab602db8..846c6f0cc2730 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/RateLongAggregator.java @@ -24,6 +24,8 @@ import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Releasables; +import java.util.Arrays; + /** * A rate grouping aggregation definition for long. * This class is generated. Edit `X-RateAggregator.java.st` instead. @@ -59,10 +61,10 @@ public static void combineIntermediate( public static void combineStates( LongRateGroupingState current, int currentGroupId, // make the stylecheck happy - LongRateGroupingState state, - int statePosition + LongRateGroupingState otherState, + int otherGroupId ) { - throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + current.combineState(currentGroupId, otherState, otherGroupId); } public static Block evaluateFinal(LongRateGroupingState state, IntVector selected, DriverContext driverContext) { @@ -163,6 +165,7 @@ void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, if (state == null) { adjustBreaker(LongRateState.bytesUsed(valueCount)); state = new LongRateState(valueCount); + state.reset = reset; states.set(groupId, state); // TODO: add bulk_copy to Block for (int i = 0; i < valueCount; i++) { @@ -172,11 +175,11 @@ void combine(int groupId, LongBlock timestamps, LongBlock values, double reset, } else { adjustBreaker(LongRateState.bytesUsed(state.entries() + valueCount)); var newState = new LongRateState(state.entries() + valueCount); + newState.reset = state.reset + reset; states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); adjustBreaker(-LongRateState.bytesUsed(state.entries())); // old state } - state.reset += reset; } void merge(LongRateState curr, LongRateState dst, int firstIndex, int rightCount, LongBlock timestamps, LongBlock values) { @@ -208,6 +211,49 @@ void merge(LongRateState curr, LongRateState dst, int firstIndex, int rightCount } } + void combineState(int groupId, LongRateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? 
otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker(LongRateState.bytesUsed(len)); + curr = new LongRateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + LongRateState mergeState(LongRateState s1, LongRateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker(LongRateState.bytesUsed(newLen)); + var dst = new LongRateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + @Override public long ramBytesUsed() { return states.ramBytesUsed() + stateBytes; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st index 86f5e058bd19c..ad305809c6651 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-RateAggregator.java.st @@ -27,6 +27,8 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; import 
org.elasticsearch.core.Releasables; +import java.util.Arrays; + /** * A rate grouping aggregation definition for $type$. * This class is generated. Edit `X-RateAggregator.java.st` instead. @@ -62,10 +64,10 @@ public class Rate$Type$Aggregator { public static void combineStates( $Type$RateGroupingState current, int currentGroupId, // make the stylecheck happy - $Type$RateGroupingState state, - int statePosition + $Type$RateGroupingState otherState, + int otherGroupId ) { - throw new UnsupportedOperationException("ordinals grouping is not supported yet"); + current.combineState(currentGroupId, otherState, otherGroupId); } public static Block evaluateFinal($Type$RateGroupingState state, IntVector selected, DriverContext driverContext) { @@ -166,6 +168,7 @@ public class Rate$Type$Aggregator { if (state == null) { adjustBreaker($Type$RateState.bytesUsed(valueCount)); state = new $Type$RateState(valueCount); + state.reset = reset; states.set(groupId, state); // TODO: add bulk_copy to Block for (int i = 0; i < valueCount; i++) { @@ -175,11 +178,11 @@ public class Rate$Type$Aggregator { } else { adjustBreaker($Type$RateState.bytesUsed(state.entries() + valueCount)); var newState = new $Type$RateState(state.entries() + valueCount); + newState.reset = state.reset + reset; states.set(groupId, newState); merge(state, newState, firstIndex, valueCount, timestamps, values); adjustBreaker(-$Type$RateState.bytesUsed(state.entries())); // old state } - state.reset += reset; } void merge($Type$RateState curr, $Type$RateState dst, int firstIndex, int rightCount, LongBlock timestamps, $Type$Block values) { @@ -211,6 +214,49 @@ public class Rate$Type$Aggregator { } } + void combineState(int groupId, $Type$RateGroupingState otherState, int otherGroupId) { + var other = otherGroupId < otherState.states.size() ? 
otherState.states.get(otherGroupId) : null; + if (other == null) { + return; + } + ensureCapacity(groupId); + var curr = states.get(groupId); + if (curr == null) { + var len = other.entries(); + adjustBreaker($Type$RateState.bytesUsed(len)); + curr = new $Type$RateState(Arrays.copyOf(other.timestamps, len), Arrays.copyOf(other.values, len)); + curr.reset = other.reset; + states.set(groupId, curr); + } else { + states.set(groupId, mergeState(curr, other)); + } + } + + $Type$RateState mergeState($Type$RateState s1, $Type$RateState s2) { + var newLen = s1.entries() + s2.entries(); + adjustBreaker($Type$RateState.bytesUsed(newLen)); + var dst = new $Type$RateState(newLen); + dst.reset = s1.reset + s2.reset; + int i = 0, j = 0, k = 0; + while (i < s1.entries() && j < s2.entries()) { + if (s1.timestamps[i] > s2.timestamps[j]) { + dst.timestamps[k] = s1.timestamps[i]; + dst.values[k] = s1.values[i]; + ++i; + } else { + dst.timestamps[k] = s2.timestamps[j]; + dst.values[k] = s2.values[j]; + ++j; + } + ++k; + } + System.arraycopy(s1.timestamps, i, dst.timestamps, k, s1.entries() - i); + System.arraycopy(s1.values, i, dst.values, k, s1.entries() - i); + System.arraycopy(s2.timestamps, j, dst.timestamps, k, s2.entries() - j); + System.arraycopy(s2.values, j, dst.values, k, s2.entries() - j); + return dst; + } + @Override public long ramBytesUsed() { return states.ramBytesUsed() + stateBytes; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java index ad884538ac85f..855066fcb9da5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorFactory.java @@ -143,14 +143,11 @@ public Page getOutput() { } 
iterator.consume(); shard = blockFactory.newConstantIntBlockWith(iterator.slice.shardContext().index(), currentPagePos); - boolean singleSegmentNonDecreasing; if (iterator.slice.numLeaves() == 1) { - singleSegmentNonDecreasing = true; int segmentOrd = iterator.slice.getLeaf(0).leafReaderContext().ord; leaf = blockFactory.newConstantIntBlockWith(segmentOrd, currentPagePos).asVector(); } else { // Due to the multi segment nature of time series source operator singleSegmentNonDecreasing must be false - singleSegmentNonDecreasing = false; leaf = segmentsBuilder.build(); segmentsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); } @@ -161,10 +158,9 @@ public Page getOutput() { timestampIntervalBuilder = blockFactory.newLongVectorBuilder(Math.min(remainingDocs, maxPageSize)); tsids = tsOrdBuilder.build(); tsOrdBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); - page = new Page( currentPagePos, - new DocVector(shard.asVector(), leaf, docs, singleSegmentNonDecreasing).asBlock(), + new DocVector(shard.asVector(), leaf, docs, leaf.isConstant()).asBlock(), tsids.asBlock(), timestampIntervals.asBlock() ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 16340909a4fd3..b397d36837d01 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -43,14 +43,17 @@ import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; +import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import 
org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.junit.After; @@ -285,17 +288,6 @@ record Doc(String pod, long timestamp, long requests) { return docs.size(); }); var ctx = driverContext(); - HashAggregationOperator initialHash = new HashAggregationOperator( - List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL)), - () -> BlockHash.build( - List.of(new HashAggregationOperator.GroupSpec(3, ElementType.BYTES_REF)), - ctx.blockFactory(), - randomIntBetween(1, 1000), - randomBoolean() - ), - ctx - ); - HashAggregationOperator finalHash = new HashAggregationOperator( List.of(new RateLongAggregatorFunctionSupplier(List.of(1, 2, 3), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL)), () -> BlockHash.build( @@ -309,20 +301,62 @@ record Doc(String pod, long timestamp, long requests) { List results = new ArrayList<>(); var requestsField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); var podField = new KeywordFieldMapper.KeywordFieldType("pod"); - OperatorTestCase.runDriver( - new Driver( - ctx, - sourceOperatorFactory.get(ctx), + if (randomBoolean()) { + HashAggregationOperator initialHash = new HashAggregationOperator( List.of( - ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), - ValuesSourceReaderOperatorTests.factory(reader, requestsField, 
ElementType.LONG).get(ctx), - initialHash, - finalHash + new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) ), - new TestResultPageSinkOperator(results::add), - () -> {} - ) - ); + () -> BlockHash.build( + List.of(new HashAggregationOperator.GroupSpec(3, ElementType.BYTES_REF)), + ctx.blockFactory(), + randomIntBetween(1, 1000), + randomBoolean() + ), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, podField, ElementType.BYTES_REF).get(ctx), + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + initialHash, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } else { + var blockLoader = new BlockDocValuesReader.BytesRefsFromOrdsBlockLoader("pod"); + var shardContext = new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE); + var ordinalGrouping = new OrdinalsGroupingOperator( + shardIdx -> blockLoader, + List.of(shardContext), + ElementType.BYTES_REF, + 0, + "pod", + List.of( + new RateLongAggregatorFunctionSupplier(List.of(3, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) + ), + randomIntBetween(1, 1000), + ctx + ); + OperatorTestCase.runDriver( + new Driver( + ctx, + sourceOperatorFactory.get(ctx), + List.of( + ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), + ordinalGrouping, + finalHash + ), + new TestResultPageSinkOperator(results::add), + () -> {} + ) + ); + } Map rates = new HashMap<>(); for (Page result : results) { BytesRefBlock keysBlock = result.getBlock(0); From 1c60a8dd8f0db70770642cb60cfb2bf1b573a980 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 26 Mar 2024 15:21:17 -0400 Subject: [PATCH 186/214] Forward port release notes for v8.13.0 
(#106783) --- .../reference/migration/migrate_8_13.asciidoc | 116 ++++- docs/reference/release-notes/8.13.0.asciidoc | 439 +++++++++++++++++- .../release-notes/highlights.asciidoc | 10 + 3 files changed, 562 insertions(+), 3 deletions(-) diff --git a/docs/reference/migration/migrate_8_13.asciidoc b/docs/reference/migration/migrate_8_13.asciidoc index c2f431da388f1..c9e726d940b1d 100644 --- a/docs/reference/migration/migrate_8_13.asciidoc +++ b/docs/reference/migration/migrate_8_13.asciidoc @@ -16,5 +16,119 @@ coming::[8.13.0] [[breaking-changes-8.13]] === Breaking changes -There are no breaking changes in {es} 8.13. +The following changes in {es} 8.13 might affect your applications +and prevent them from operating normally. +Before upgrading to 8.13, review these changes and take the described steps +to mitigate the impact. + + +There are no notable breaking changes in {es} 8.13. +But there are some less critical breaking changes. + +[discrete] +[[breaking_813_index_setting_changes]] +==== Index setting changes + +[[change_index_look_ahead_time_index_settings_default_value_from_2_hours_to_30_minutes]] +.Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. +[%collapsible] +==== +*Details* + +Lower the `index.look_ahead_time` index setting's max value from 2 hours to 30 minutes. + +*Impact* + +Documents with @timestamp of 30 minutes or more in the future will be rejected. Before documents with @timestamp of 2 hours or more in the future were rejected. If the previous behaviour should be kept, then update the `index.look_ahead_time` setting to two hours before performing the upgrade. +==== + +[[lower_look_ahead_time_index_settings_max_value]] +.Lower the `look_ahead_time` index setting's max value +[%collapsible] +==== +*Details* + +Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. 
+ +*Impact* + +Any value between 2 hours and 7 days will be as a look ahead time of 2 hours is defined +==== + +[discrete] +[[breaking_813_rest_api_changes]] +==== REST API changes + +[[esql_grammar_from_metadata_no_longer_requires]] +.ESQL: Grammar - FROM METADATA no longer requires [] +[%collapsible] +==== +*Details* + +Remove [ ] for METADATA option inside FROM command statements + +*Impact* + +Previously to return metadata fields, one had to use square brackets: (eg. 'FROM index [METADATA _index]'). This is no longer needed: the [ ] are dropped and do not have to be specified, thus simplifying the command above to:'FROM index METADATA _index'. +==== + +[[es_ql_remove_project_keyword_from_grammar]] +.ES|QL: remove PROJECT keyword from the grammar +[%collapsible] +==== +*Details* + +Removes the PROJECT keyword (an alias for KEEP) from ES|QL grammar + +*Impact* + +Before this change, users could use PROJECT as an alias for KEEP in ESQL queries, (eg. 'FROM idx | PROJECT name, surname') the parser replaced PROJECT with KEEP, emitted a warning: 'PROJECT command is no longer supported, please use KEEP instead' and the query was executed normally. With this change, PROJECT command is no longer recognized by the query parser; queries using PROJECT command now return a parsing exception. +==== + +[[esql_remove_nan_finite_infinite]] +.[ESQL] Remove is_nan, is_finite, and `is_infinite` +[%collapsible] +==== +*Details* + +Removes the functions `is_nan`, `is_finite`, and `is_infinite`. + +*Impact* + +Attempting to use the above functions will now be a planner time error. These functions are no longer supported. +==== + + +[discrete] +[[deprecated-8.13]] +=== Deprecations + +The following functionality has been deprecated in {es} 8.13 +and will be removed in a future version. +While this won't have an immediate impact on your applications, +we strongly encourage you to take the described steps to update your code +after upgrading to 8.13. 
+ +To find out if you are using any deprecated functionality, +enable <>. + +[discrete] +[[deprecations_813_cluster_and_node_setting]] +==== Cluster and node setting deprecations + +[[deprecate_client_type]] +.Deprecate `client.type` +[%collapsible] +==== +*Details* + +The node setting `client.type` has been ignored since the node client was removed in 8.0. The setting is now deprecated and will be removed in a future release. + +*Impact* + +Remove the `client.type` setting from `elasticsearch.yml` +==== + +[discrete] +[[deprecations_813_rest_api]] +==== REST API deprecations + +[[desirednode_deprecate_node_version_field_make_it_optional_for_current_version]] +.`DesiredNode:` deprecate `node_version` field and make it optional for the current version +[%collapsible] +==== +*Details* + +The desired_node API includes a `node_version` field to perform validation on the new node version required. This kind of check is too broad, and it's better done by external logic, so it has been removed, making the `node_version` field not necessary. The field will be removed in a later version. + +*Impact* + +Users should update their usages of `desired_node` to not include the `node_version` field anymore. +==== diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 5b7d4f90f98de..2ef183374f167 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -1,8 +1,443 @@ [[release-notes-8.13.0]] == {es} version 8.13.0 -coming[8.13.0] - Also see <>. +[[breaking-8.13.0]] +[float] +=== Breaking changes + +ES|QL:: +* ESQL: Grammar - FROM METADATA no longer requires [] {es-pull}105221[#105221] +* ES|QL: remove PROJECT keyword from the grammar {es-pull}105064[#105064] +* [ESQL] Remove is_nan, is_finite, and `is_infinite` {es-pull}104091[#104091] + +TSDB:: +* Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. 
{es-pull}103898[#103898] +* Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. {es-pull}103434[#103434] + +[[bug-8.13.0]] +[float] +=== Bug fixes + +Aggregations:: +* Disable parallel collection for terms aggregation with `min_doc_count` equals to 0 {es-pull}106156[#106156] +* `GlobalOrdCardinalityAggregator` should use `HyperLogLogPlusPlus` instead of `HyperLogLogPlusPlusSparse` {es-pull}105546[#105546] + +Allocation:: +* Fix disk computation when initializing new shards {es-pull}102879[#102879] +* Fix disk computation when initializing unassigned shards in desired balance computation {es-pull}102207[#102207] + +Application:: +* Fix Search Applications bug where deleting an alias before deleting an application intermittently caused errors {es-pull}106329[#106329] +* Use search to determine if cluster contains data {es-pull}103920[#103920] +* [Connector API] Bugfix: support list type in filtering advenced snippet value {es-pull}105633[#105633] +* [Connector API] Fix default ordering in `SyncJob` list endpoint {es-pull}105945[#105945] +* [Connector API] Fix serialisation of script params in connector index service {es-pull}106060[#106060] + +Authentication:: +* Execute SAML authentication on the generic threadpool {es-pull}105232[#105232] (issue: {es-issue}104962[#104962]) + +Authorization:: +* Adjust interception of requests for specific shard IDs {es-pull}101656[#101656] + +Client:: +* Validate settings in `ReloadSecureSettings` API {es-pull}103176[#103176] + +Data streams:: +* Apm-data: fix `@custom` component templates {es-pull}104182[#104182] +* Avoid false-positive matches on intermediate objects in `ecs@mappings` {es-pull}105440[#105440] (issue: {es-issue}102794[#102794]) +* Execute lazy rollover with an internal dedicated user #104732 {es-pull}104905[#104905] (issue: {es-issue}104732[#104732]) +* Fix write index resolution when an alias is pointing to a TSDS {es-pull}104440[#104440] (issue: {es-issue}104189[#104189]) +* 
x-pack/plugin/core: add `match_mapping_type` to `ecs@mappings` dynamic templates {es-pull}103035[#103035] + +Distributed:: +* Fix logger Strings.format calls {es-pull}104573[#104573] +* Request indexing memory pressure in APM node metrics publisher {es-pull}103520[#103520] + +ES|QL:: +* ESQL: Add single value checks on LIKE/RLIKE pushdown {es-pull}103807[#103807] (issue: {es-issue}103806[#103806]) +* ESQL: Correct out-of-range filter pushdowns {es-pull}99961[#99961] (issue: {es-issue}99960[#99960]) +* ESQL: Fix Analyzer to not interpret escaped * as a pattern {es-pull}105325[#105325] (issue: {es-issue}104955[#104955]) +* ESQL: Fix a bug loading unindexed text fields {es-pull}104553[#104553] +* ESQL: Fix bug in grammar that allowed spaces inside id pattern {es-pull}105476[#105476] (issue: {es-issue}105441[#105441]) +* ESQL: Fix replacement of nested expressions in aggs with multiple parameters {es-pull}104718[#104718] (issue: {es-issue}104706[#104706]) +* ESQL: Fix wrong attribute shadowing in pushdown rules {es-pull}105650[#105650] (issue: {es-issue}105434[#105434]) +* ESQL: Improve pushdown of certain filters {es-pull}103538[#103538] (issue: {es-issue}103536[#103536]) +* ESQL: allow `null` in date math {es-pull}103610[#103610] (issue: {es-issue}103085[#103085]) +* ESQL: make `cidr_match` foldable {es-pull}105403[#105403] (issue: {es-issue}105376[#105376]) +* ES|QL: Disable optimizations that rely on Expression.nullable() {es-pull}105691[#105691] +* ES|QL: Improve type validation in aggs for UNSIGNED_LONG better support for VERSION {es-pull}104911[#104911] (issue: {es-issue}102961[#102961]) +* ES|QL: better management of exact subfields for TEXT fields {es-pull}103510[#103510] (issue: {es-issue}99899[#99899]) +* Fix error on sorting unsortable `geo_point` and `cartesian_point` {es-pull}106351[#106351] (issue: {es-issue}106007[#106007]) +* For empty mappings use a `LocalRelation` {es-pull}105081[#105081] (issue: {es-issue}104809[#104809]) +* Resume driver when 
failing to fetch pages {es-pull}106392[#106392] (issue: {es-issue}106262[#106262]) +* Review KEEP logic to prevent duplicate column names {es-pull}103316[#103316] +* `ProjectOperator` should not retain references to released blocks {es-pull}105848[#105848] + +Engine:: +* Consider currently refreshing data in the memory usage of refresh {es-pull}104122[#104122] +* Release `TranslogSnapshot` buffer after iteration {es-pull}106398[#106398] (issue: {es-issue}106390[#106390]) + +Health:: +* Make Health API more resilient to multi-version clusters {es-pull}105789[#105789] (issue: {es-issue}90183[#90183]) +* Stop the periodic health logger when es is stopping {es-pull}105272[#105272] + +ILM+SLM:: +* Remove `hashCode` and `equals` from `OperationModeUpdateTask` {es-pull}104265[#104265] (issue: {es-issue}100871[#100871]) +* [ILM] Delete step deletes data stream with only one index {es-pull}105772[#105772] + +Indices APIs:: +* Fix `require_alias` implicit true value on presence {es-pull}104099[#104099] (issue: {es-issue}103945[#103945]) + +Infra/CLI:: +* Fix server cli to always pass through exit code {es-pull}104943[#104943] + +Infra/Core:: +* Do not enable APM agent 'instrument', it's not required for manual tracing {es-pull}105055[#105055] +* Fix bogus assertion tripped by force-executed tasks {es-pull}104581[#104581] (issue: {es-issue}104580[#104580]) +* Metrics: Allow `AsyncCounters` to switch providers {es-pull}103025[#103025] +* Metrics: Handle null observations in observers {es-pull}103091[#103091] + +Infra/Node Lifecycle:: +* Close rather than stop `HttpServerTransport` on shutdown {es-pull}102759[#102759] (issue: {es-issue}102501[#102501]) + +Ingest Node:: +* Add stable `ThreadPool` constructor to `LogstashInternalBridge` {es-pull}105163[#105163] +* Adding `executedPipelines` to the `IngestDocument` copy constructor {es-pull}105427[#105427] +* Revert "x-pack/plugin/apm-data: download geoip DB on pipeline creation" {es-pull}104505[#104505] +* X-pack/plugin/apm-data: 
fix `@custom` pipeline support {es-pull}104113[#104113] + +Machine Learning:: +* Allow GET inference models by user a with read only permission {es-pull}105346[#105346] +* Avoid computing `currentInferenceProcessors` on every cluster state {es-pull}106057[#106057] +* Catch all the potential exceptions in the ingest processor code {es-pull}105391[#105391] +* Changed system auditor to use levels {es-pull}105429[#105429] +* During ML maintenance, reset jobs in the reset state without a corresponding task {es-pull}106062[#106062] +* Fix `categorize_text` aggregation nested under empty buckets {es-pull}105987[#105987] (issue: {es-issue}105836[#105836]) +* Fix resetting a job if the original reset task no longer exists. {es-pull}106020[#106020] +* Retry updates to model snapshot ID on job config {es-pull}104077[#104077] +* The OpenAI model parameter should be in service settings not task settings. Move the configuration field to service settings {es-pull}105458[#105458] +* Undeploy elser when inference model deleted {es-pull}104230[#104230] + +Mapping:: +* Fix parsing of flattened fields within subobjects: false {es-pull}105373[#105373] + +Network:: +* Fix use-after-free at event-loop shutdown {es-pull}105486[#105486] + +Search:: +* Correct profiled rewrite time for knn with a pre-filter {es-pull}104150[#104150] +* Force execution of `SearchService.Reaper` {es-pull}106544[#106544] (issue: {es-issue}106543[#106543]) +* Move `TransportTermsEnumAction` coordination off transport threads {es-pull}104408[#104408] +* Remove `SearchException` usages without a proper status code {es-pull}105150[#105150] +* Require the name field for `inner_hits` for collapse {es-pull}104666[#104666] +* add validation on _id field when upsert new doc {es-pull}103399[#103399] (issue: {es-issue}102981[#102981]) + +Security:: +* Revert "Validate settings in `ReloadSecureSettings` API" {es-pull}103310[#103310] + +Snapshot/Restore:: +* Do not record s3 http request time when it is not available 
{es-pull}105103[#105103] +* `URLRepository` should not block shutdown {es-pull}105588[#105588] + +TLS:: +* Respect --pass option in certutil csr mode {es-pull}106105[#106105] + +Transform:: +* Fix `_reset` API when called with `force=true` on a failed transform {es-pull}106574[#106574] (issue: {es-issue}106573[#106573]) +* Fix a bug where destination index aliases are not set up for an unattended transform {es-pull}105499[#105499] +* Remove duplicate checkpoint audits {es-pull}105164[#105164] (issue: {es-issue}105106[#105106]) +* Return results in order {es-pull}105089[#105089] (issue: {es-issue}104847[#104847]) +* Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` {es-pull}103682[#103682] (issue: {es-issue}103115[#103115]) + +Vector Search:: +* Fix bug when nested knn pre-filter might match nested docs {es-pull}105994[#105994] + +Watcher:: +* Handling exceptions on watcher reload {es-pull}105442[#105442] (issue: {es-issue}69842[#69842]) + +[[deprecation-8.13.0]] +[float] +=== Deprecations + +Distributed:: +* `DesiredNode:` deprecate `node_version` field and make it optional (unused) in current parser {es-pull}104209[#104209] + +Infra/Core:: +* Deprecate `client.type` {es-pull}104574[#104574] + +[[enhancement-8.13.0]] +[float] +=== Enhancements + +Aggregations:: +* Add index mapping parameter for `counted_keyword` {es-pull}103646[#103646] +* Introduce an `AggregatorReducer` to reduce the footprint of aggregations in the coordinating node {es-pull}105207[#105207] +* Release resources in `BestBucketsDeferringCollector` earlier {es-pull}104893[#104893] +* Support sampling in `counted_terms` aggregation {es-pull}103846[#103846] + +Allocation:: +* Account for reserved disk size {es-pull}103903[#103903] +* Derive expected replica size from primary {es-pull}102078[#102078] + +Application:: +* Add serverless scopes for Connector APIs {es-pull}104063[#104063] +* [Connector API] Change required privileges to indices:data/read(write) 
{es-pull}105289[#105289] +* [Connector API] Implement update `index_name` action {es-pull}104648[#104648] +* [Connector API] Support filtering by name, index name in list action {es-pull}105131[#105131] +* [Connector API] Support filtering connectors by service type and a query {es-pull}105178[#105178] +* [Connector API] Support updating configuration values only {es-pull}105249[#105249] +* [Connectors API] Add new field `api_key_secret_id` to Connector {es-pull}104982[#104982] +* [Connectors API] Implement connector status update action {es-pull}104750[#104750] +* [Connectors API] Implement update native action endpoint {es-pull}104654[#104654] +* [Connectors API] Implement update service type action {es-pull}104643[#104643] +* [Connectors API] Relax strict response parsing for get/list operations {es-pull}104909[#104909] +* [Profiling] Extract properties faster from source {es-pull}104356[#104356] +* [Profiling] Mark all templates as managed {es-pull}103783[#103783] +* [Profiling] Speed up processing of stacktraces {es-pull}104674[#104674] +* [Profiling] Support downsampling of generic events {es-pull}104730[#104730] +* [Profiling] Use shard request cache consistently {es-pull}103643[#103643] + +Authentication:: +* Expose API key authentication metrics {es-pull}103178[#103178] +* Expose realms authentication metrics {es-pull}104200[#104200] +* Expose service account authentication metrics {es-pull}104043[#104043] +* Expose token authentication metrics {es-pull}104142[#104142] +* Hot-reloadable LDAP bind password {es-pull}104320[#104320] +* Support of `match` for the Query API Key API {es-pull}104594[#104594] + +Authorization:: +* [Security Solution] Allow write permission for `kibana_system` role on endpoint response index {es-pull}103555[#103555] + +CRUD:: +* Avoid wrapping searchers multiple times in mget {es-pull}104227[#104227] (issue: {es-issue}85069[#85069]) + +Client:: +* Add rest spec for Query User API {es-pull}104529[#104529] + +Cluster Coordination:: 
+* Add troubleshooting docs link to `PeerFinder` logs {es-pull}104787[#104787] +* Report current master in `PeerFinder` {es-pull}104396[#104396] + +Data streams:: +* Introduce lazy rollover for mapping updates in data streams {es-pull}103309[#103309] (issue: {es-issue}89346[#89346]) +* Use new `ignore_dynamic_beyond_limit` in logs and metric data streams {es-pull}105180[#105180] +* X-pack/plugin/apm-data: add dynamic setting for enabling template registry {es-pull}104386[#104386] (issue: {es-issue}104385[#104385]) +* X-pack/plugin/core: rename `double_metrics` template {es-pull}103033[#103033] +* x-pack/plugin/apm-data: Add a new field transaction.profiler_stack_trace_ids to traces-apm@mappings.yaml {es-pull}105223[#105223] +* x-pack/plugin/apm-data: Map some APM fields as flattened and fix error.grouping_name script {es-pull}103032[#103032] +* x-pack/plugin/core: make automatic rollovers lazy {es-pull}105273[#105273] (issue: {es-issue}104083[#104083]) + +Discovery-Plugins:: +* Set read timeout for fetching IMDSv2 token {es-pull}104407[#104407] (issue: {es-issue}104244[#104244]) + +Downsampling:: +* Support patch transport version from 8.12 {es-pull}104406[#104406] + +ES|QL:: +* Add ES|QL async delete API {es-pull}103628[#103628] +* Avoid humongous blocks {es-pull}103340[#103340] +* ESQL: Add TO_UPPER and TO_LOWER functions {es-pull}104309[#104309] +* ESQL: Add option to drop null fields {es-pull}102428[#102428] +* ESQL: Add plan consistency verification after each optimizer {es-pull}105371[#105371] +* ESQL: Check field exists before load from `_source` {es-pull}103632[#103632] +* ESQL: Delay finding field load infrastructure {es-pull}103821[#103821] +* ESQL: Expand shallow copy with vecs {es-pull}103681[#103681] (issue: {es-issue}100528[#100528]) +* ESQL: Extend STATS command to support aggregate expressions {es-pull}104958[#104958] +* ESQL: Infer not null for aggregated fields {es-pull}103673[#103673] (issue: {es-issue}102787[#102787]) +* ESQL: Nested expressions 
inside stats command {es-pull}104387[#104387] (issue: {es-issue}99828[#99828]) +* ESQL: Pre-allocate rows in TopNOperator {es-pull}104796[#104796] +* ESQL: Referencing expressions that contain backticks requires <>. {es-pull}100740[#100740] (issue: {es-issue}100312[#100312]) +* ESQL: Simpify IS NULL/IS NOT NULL evaluation {es-pull}103099[#103099] (issue: {es-issue}103097[#103097]) +* ESQL: Speed up reading many nulls {es-pull}105088[#105088] +* ESQL: Support loading shapes from source into WKB blocks {es-pull}104269[#104269] +* ESQL: Track the rest of `DocVector` {es-pull}103727[#103727] +* ESQL: `MV_FIRST` and `MV_LAST` {es-pull}103928[#103928] +* ESQL: add `date_diff` function {es-pull}104118[#104118] (issue: {es-issue}101942[#101942]) +* ESQL: push down "[text_field] is not null" {es-pull}105593[#105593] +* ES|QL Async Query API {es-pull}103398[#103398] +* Prepare enrich plan to support multi clusters {es-pull}104355[#104355] +* Reading points from source to reduce precision loss {es-pull}103698[#103698] +* Remove deprecated Block APIs {es-pull}103592[#103592] +* Reserve bytes before serializing page {es-pull}105269[#105269] +* Support ST_CENTROID over spatial points {es-pull}104218[#104218] (issue: {es-issue}104656[#104656]) +* Support cross clusters query in ESQL {es-pull}101640[#101640] +* Support enrich ANY mode in cross clusters query {es-pull}104840[#104840] +* Support enrich coordinator mode {es-pull}104936[#104936] +* Support enrich remote mode {es-pull}104993[#104993] + +Geo:: +* Add support for Well Known Binary (WKB) in the fields API for spatial fields {es-pull}103461[#103461] +* Add the possibility to transform WKT to WKB directly {es-pull}104030[#104030] + +Health:: +* Add APM metrics to `HealthPeriodicLogger` {es-pull}102765[#102765] +* Extend `repository_integrity` health indicator for unknown and invalid repos {es-pull}104614[#104614] (issue: {es-issue}103784[#103784]) + +ILM+SLM:: +* Add "step":"ERROR" to ILM explain response for missing policy 
{es-pull}103720[#103720] (issue: {es-issue}99030[#99030]) +* Add default rollover conditions to ILM explain API response {es-pull}104721[#104721] (issue: {es-issue}103395[#103395]) +* ILM/SLM history policies forcemerge in hot and dsl configuration {es-pull}103190[#103190] + +Infra/CLI:: +* Add replay diagnostic dir to system jvm options {es-pull}103535[#103535] + +Infra/Circuit Breakers:: +* Lower G1 minimum full GC interval {es-pull}105259[#105259] + +Infra/Core:: +* Adding threadpool metrics {es-pull}102371[#102371] +* ES - document observing with rejections {es-pull}104859[#104859] +* Thread pool metrics {es-pull}104500[#104500] + +Infra/Metrics:: +* Modify name of threadpool metric for rejected {es-pull}105015[#105015] + +Infra/Node Lifecycle:: +* Wait for async searches to finish when shutting down {es-pull}101487[#101487] + +Infra/Transport API:: +* Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return `ParentTaskAssigningClient` {es-pull}100813[#100813] + +Ingest Node:: +* Adding `ActionRequestLazyBuilder` implementation of `RequestBuilder` {es-pull}104927[#104927] +* Adding a `RequestBuilder` interface {es-pull}104778[#104778] +* Adding a custom exception for problems with the graph of pipelines to be applied to a document {es-pull}105196[#105196] +* Improving the performance of the ingest simulate verbose API {es-pull}105265[#105265] +* Ingest geoip processor cache 'no results' from the database {es-pull}104092[#104092] +* Limiting the number of nested pipelines that can be executed {es-pull}105428[#105428] +* Modifying request builders {es-pull}104636[#104636] + +Java Low Level REST Client:: +* Set thread name used by REST client {es-pull}103160[#103160] + +Machine Learning:: +* Add optional pruning configuration (weighted terms scoring) to text expansion query {es-pull}102862[#102862] +* Add text_embedding inference service with multilingual-e5 and custom eland models {es-pull}104949[#104949] +* Add 3 automatic restarts for 
`pytorch_inference` processes that stop unexpectedly {es-pull}104433[#104433] +* Add support for Cohere inference service {es-pull}104559[#104559] +* Always test for spikes and dips as well as changes in the change point aggregation {es-pull}103922[#103922] +* Apply windowing and chunking to long documents {es-pull}104363[#104363] +* Automatically download the ELSER model when PUT in `_inference` {es-pull}104334[#104334] +* Better handling of number of allocations in pytorch_inference in the case that hardware_concurrency fails {ml-pull}2607[#2607] +* Change detection aggregation improvements {es-pull}102824[#102824] +* Conditionally send the dimensions field as part of the openai requests {es-pull}105299[#105299] (issue: {es-issue}105005[#105005]) +* Endpoint to find positions of Grok pattern matches {es-pull}104394[#104394] +* Ensure unique IDs between inference models and trained model deployments {es-pull}103996[#103996] +* Expose some ML metrics via APM {es-pull}102584[#102584] +* Make `task_type` optional in `_inference` APIs {es-pull}104483[#104483] +* Update `missingTrainedModel` message to include: you may need to create it {es-pull}104155[#104155] +* Upgrade MKL to version 2024.0 on Linux x86_64 {ml-pull}2619[#2619] +* Upgrade PyTorch to version 2.1.2. 
{ml-pull}2588[#2588] +* Upgrade zlib to version 1.2.13 on Windows {ml-pull}2588[#2588] +* Use Boost.JSON for JSON processing {ml-pull}2614[#2614] +* Validate inference model ids {es-pull}103669[#103669] + + +Mapping:: +* Add `index.mapping.total_fields.ignore_dynamic_beyond_limit` setting to ignore dynamic fields when field limit is reached {es-pull}96235[#96235] +* Make field limit more predictable {es-pull}102885[#102885] + +Network:: +* Prune unnecessary information from TransportNodesStatsAction.NodeStatsRequest {es-pull}102559[#102559] (issue: {es-issue}100878[#100878]) + +Percolator:: +* Return `matched_queries` in Percolator {es-pull}103084[#103084] (issue: {es-issue}10163[#10163]) + +Query Languages:: +* Introduce Alias.unwrap method {es-pull}104575[#104575] + +Search:: +* Dyamically adjust node metrics cache expire {es-pull}104460[#104460] +* Enhancement: Metrics for Search Took Times using Action Listeners {es-pull}104996[#104996] +* Field caps performance pt2 {es-pull}105941[#105941] +* Field-caps field has value lookup use map instead of looping array {es-pull}105770[#105770] +* Flag in `_field_caps` to return only fields with values in index {es-pull}103651[#103651] +* Include better output in profiling & `toString` for automaton based queries {es-pull}105468[#105468] +* Metrics for search latencies {es-pull}102557[#102557] +* Ref count search response bytes {es-pull}103763[#103763] (issue: {es-issue}102657[#102657]) +* Remove leniency in msearch parsing {es-pull}103232[#103232] +* Resolve Cluster API {es-pull}102726[#102726] +* Reuse number field mapper tests in other modules {es-pull}99142[#99142] (issue: {es-issue}92947[#92947]) +* S3 first byte latency metric {es-pull}102435[#102435] +* Update s3 latency metric to use micros {es-pull}103633[#103633] +* Upgrade to Lucene 9.10.0 {es-pull}105578[#105578] + +Security:: +* Add Query Users API {es-pull}104033[#104033] +* Add `ApiKey` expiration time to audit log {es-pull}103959[#103959] +* Add expiration 
time to update api key api {es-pull}103453[#103453] +* Add stricter validation for api key expiration time {es-pull}103973[#103973] +* Add support for the `simple_query_string` to the Query API Key API {es-pull}104132[#104132] +* Add support for the `type` parameter, for sorting, to the Query API Key API {es-pull}104625[#104625] +* Aggs support for Query API Key Information API {es-pull}104895[#104895] +* Hot-reloadable remote cluster credentials {es-pull}102798[#102798] + +Snapshot/Restore:: +* Add s3 `HeadObject` request to request stats {es-pull}105105[#105105] +* Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs {es-pull}105044[#105044] +* Fix blob cache race, decay, time dependency {es-pull}104784[#104784] +* Pause shard snapshots on graceful shutdown {es-pull}101717[#101717] +* Retry indefinitely for s3 indices blob read errors {es-pull}103300[#103300] + +Store:: +* List hidden shard stores by default {es-pull}103710[#103710] + +TLS:: +* 'elasticsearch-certutil cert' now verifies the issuing chain of the generated certificate {es-pull}103948[#103948] + +TSDB:: +* Improve storage efficiency for non-metric fields in TSDB {es-pull}99747[#99747] +* Introduce experimental pass-through field type {es-pull}103648[#103648] +* Nest pass-through objects within objects {es-pull}105062[#105062] +* Restrict usage of certain aggregations when in sort order execution is required {es-pull}104665[#104665] +* Small time series agg improvement {es-pull}106288[#106288] + +Transform:: +* Allow transforms to use PIT with remote clusters again {es-pull}105192[#105192] (issue: {es-issue}104518[#104518]) +* Transforms: Adding basic stats API param {es-pull}104878[#104878] + +Vector Search:: +* Add new int8_flat and flat vector index types {es-pull}104872[#104872] +* Add support for more than one `inner_hit` when searching nested vectors {es-pull}104006[#104006] +* Making `k` and `num_candidates` optional for knn search {es-pull}101209[#101209] (issue: 
{es-issue}97533[#97533]) + +[[feature-8.13.0]] +[float] +=== New features + +Data streams:: +* Add `require_data_stream` parameter to indexing requests to enforce indexing operations target a data stream {es-pull}101872[#101872] (issue: {es-issue}97032[#97032]) +* Redirect failed ingest node operations to a failure store when available {es-pull}103481[#103481] + +ES|QL:: +* ESQL: Introduce mode setting for ENRICH {es-pull}103949[#103949] +* ESQL: add =~ operator (case insensitive equality) {es-pull}103656[#103656] + +Health:: +* Create a DSL health indicator as part of the health API {es-pull}103130[#103130] + +Infra/Core:: +* Add gradle tasks and code to modify and access mappings between version ids and release versions {es-pull}103627[#103627] + +Mapping:: +* Add `unmatch_mapping_type`, and support array of types {es-pull}103171[#103171] (issues: {es-issue}102807[#102807], {es-issue}102795[#102795]) + +Search:: +* Added Duplicate Word Check Feature to Analysis Nori {es-pull}103325[#103325] (issue: {es-issue}103321[#103321]) +* [Synonyms] Mark Synonyms as GA {es-pull}103223[#103223] + +[[upgrade-8.13.0]] +[float] +=== Upgrades + +Query Languages:: +* Upgrade ANTLR4 to 4.13.1 {es-pull}105334[#105334] (issue: {es-issue}102953[#102953]) + +Search:: +* Upgrade to Lucene 9.9.0 {es-pull}102782[#102782] +* Upgrade to Lucene 9.9.1 {es-pull}103387[#103387] +* Upgrade to Lucene 9.9.2 {es-pull}104753[#104753] + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 92cd447a48deb..25096779521e4 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -62,6 +62,16 @@ fields that don't have a value. This can be done through the newly added {es-pull}103651[#103651] +[discrete] +[[new_lucene_9_10_release]] +=== New Lucene 9.10 release +- https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. 
+- https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search +- https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. +- https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. + +{es-pull}105578[#105578] + // end::notable-highlights[] From ca5a7519a9f1d16c548a3eba1ba1021515fcc0cd Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 26 Mar 2024 14:36:13 -0500 Subject: [PATCH 187/214] Updating FullClusterRestartIT.testWatcher to account for watcher running (#106697) --- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 254d12a05d936..d7760eb42a1db 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -731,7 +731,7 @@ private void assertBasicWatchInteractions() throws Exception { Map updateWatch = entityAsMap(client().performRequest(createWatchRequest)); assertThat(updateWatch.get("created"), equalTo(false)); - assertThat(updateWatch.get("_version"), equalTo(2)); + assertThat((int) updateWatch.get("_version"), greaterThanOrEqualTo(2)); Map get = entityAsMap(client().performRequest(new 
Request("GET", "_watcher/watch/new_watch"))); assertThat(get.get("found"), equalTo(true)); From 930654b496552cf3fc81408d171e151a0181666e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Mar 2024 19:56:29 +0000 Subject: [PATCH 188/214] Bump versions after 7.17.19 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++++++++ .buildkite/pipelines/periodic.yml | 14 ++++++++++++-- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 2 +- .../src/main/java/org/elasticsearch/Version.java | 1 + .../org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 34 insertions(+), 4 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 3283e691f121c..f45caaf7fdfaf 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 5e7c1a0960789..c38e0e48cd070 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1137,6 +1137,22 @@ steps: env: BWC_VERSION: 7.17.19 + - label: "{{matrix.image}} / 7.17.20 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.20 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.20 + - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 8e1ff14eda792..23f0e7d4bbacf 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -692,6 +692,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.17.19 + - label: 7.17.20 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.20#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.20 - label: 8.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest timeout_in_minutes: 300 @@ -1246,7 +1256,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -1290,7 +1300,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.19", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 8b454fa92ab02..bc5c24cf0f365 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -68,6 +68,7 @@ BWC_VERSION: - "7.17.17" - "7.17.18" - "7.17.19" + - "7.17.20" - "8.0.0" - "8.0.1" - "8.1.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index d85a432684495..6d391a3fd72ae 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "7.17.19" + - "7.17.20" - "8.12.3" - "8.13.0" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 241af6e7b6c45..391ede4d2aa40 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ 
b/server/src/main/java/org/elasticsearch/Version.java @@ -119,6 +119,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_17 = new Version(7_17_17_99); public static final Version V_7_17_18 = new Version(7_17_18_99); public static final Version V_7_17_19 = new Version(7_17_19_99); + public static final Version V_7_17_20 = new Version(7_17_20_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index b392111557615..17f594ec992d1 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -66,6 +66,7 @@ 7.17.16,7171699 7.17.17,7171799 7.17.18,7171899 +7.17.19,7171999 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index f2da9fcaf60ce..b29ae972c9b13 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -66,6 +66,7 @@ 7.17.16,7171699 7.17.17,7171799 7.17.18,7171899 +7.17.19,7171999 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 From cba9e5be9f21d85e91c1c90ae4e3f12ef8778d95 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Mar 2024 20:02:39 +0000 Subject: [PATCH 189/214] Prune changelogs after 7.17.19 release --- docs/changelog/100740.yaml | 6 ------ docs/changelog/100813.yaml | 6 ------ docs/changelog/101209.yaml | 6 ------ docs/changelog/101487.yaml | 5 ----- docs/changelog/101640.yaml | 5 ----- docs/changelog/101656.yaml | 5 ----- docs/changelog/101717.yaml | 5 ----- docs/changelog/101872.yaml | 6 ------ docs/changelog/102078.yaml | 5 ----- 
docs/changelog/102207.yaml | 6 ------ docs/changelog/102371.yaml | 5 ----- docs/changelog/102428.yaml | 5 ----- docs/changelog/102435.yaml | 5 ----- docs/changelog/102557.yaml | 5 ----- docs/changelog/102559.yaml | 5 ----- docs/changelog/102584.yaml | 5 ----- docs/changelog/102726.yaml | 5 ----- docs/changelog/102759.yaml | 6 ------ docs/changelog/102765.yaml | 5 ----- docs/changelog/102782.yaml | 5 ----- docs/changelog/102798.yaml | 5 ----- docs/changelog/102824.yaml | 5 ----- docs/changelog/102862.yaml | 5 ----- docs/changelog/102879.yaml | 5 ----- docs/changelog/102885.yaml | 5 ----- docs/changelog/103025.yaml | 5 ----- docs/changelog/103032.yaml | 5 ----- docs/changelog/103033.yaml | 5 ----- docs/changelog/103035.yaml | 5 ----- docs/changelog/103084.yaml | 6 ------ docs/changelog/103091.yaml | 5 ----- docs/changelog/103099.yaml | 6 ------ docs/changelog/103130.yaml | 5 ----- docs/changelog/103160.yaml | 5 ----- docs/changelog/103171.yaml | 7 ------- docs/changelog/103176.yaml | 5 ----- docs/changelog/103178.yaml | 5 ----- docs/changelog/103190.yaml | 5 ----- docs/changelog/103223.yaml | 10 ---------- docs/changelog/103232.yaml | 5 ----- docs/changelog/103300.yaml | 5 ----- docs/changelog/103309.yaml | 6 ------ docs/changelog/103310.yaml | 5 ----- docs/changelog/103316.yaml | 5 ----- docs/changelog/103325.yaml | 6 ------ docs/changelog/103340.yaml | 5 ----- docs/changelog/103387.yaml | 5 ----- docs/changelog/103398.yaml | 5 ----- docs/changelog/103399.yaml | 6 ------ docs/changelog/103434.yaml | 11 ----------- docs/changelog/103453.yaml | 5 ----- docs/changelog/103461.yaml | 5 ----- docs/changelog/103481.yaml | 5 ----- docs/changelog/103510.yaml | 6 ------ docs/changelog/103520.yaml | 5 ----- docs/changelog/103535.yaml | 5 ----- docs/changelog/103538.yaml | 6 ------ docs/changelog/103555.yaml | 6 ------ docs/changelog/103592.yaml | 5 ----- docs/changelog/103610.yaml | 6 ------ docs/changelog/103627.yaml | 5 ----- docs/changelog/103628.yaml | 5 ----- 
docs/changelog/103632.yaml | 5 ----- docs/changelog/103633.yaml | 5 ----- docs/changelog/103643.yaml | 5 ----- docs/changelog/103646.yaml | 5 ----- docs/changelog/103648.yaml | 5 ----- docs/changelog/103651.yaml | 12 ------------ docs/changelog/103656.yaml | 5 ----- docs/changelog/103669.yaml | 5 ----- docs/changelog/103673.yaml | 6 ------ docs/changelog/103681.yaml | 6 ------ docs/changelog/103682.yaml | 6 ------ docs/changelog/103698.yaml | 5 ----- docs/changelog/103710.yaml | 5 ----- docs/changelog/103720.yaml | 6 ------ docs/changelog/103727.yaml | 5 ----- docs/changelog/103763.yaml | 6 ------ docs/changelog/103783.yaml | 5 ----- docs/changelog/103807.yaml | 6 ------ docs/changelog/103821.yaml | 5 ----- docs/changelog/103846.yaml | 5 ----- docs/changelog/103898.yaml | 14 -------------- docs/changelog/103903.yaml | 5 ----- docs/changelog/103920.yaml | 5 ----- docs/changelog/103922.yaml | 5 ----- docs/changelog/103928.yaml | 5 ----- docs/changelog/103948.yaml | 6 ------ docs/changelog/103949.yaml | 5 ----- docs/changelog/103959.yaml | 5 ----- docs/changelog/103973.yaml | 5 ----- docs/changelog/103996.yaml | 5 ----- docs/changelog/104006.yaml | 5 ----- docs/changelog/104030.yaml | 5 ----- docs/changelog/104033.yaml | 5 ----- docs/changelog/104043.yaml | 5 ----- docs/changelog/104063.yaml | 5 ----- docs/changelog/104077.yaml | 5 ----- docs/changelog/104091.yaml | 11 ----------- docs/changelog/104092.yaml | 5 ----- docs/changelog/104099.yaml | 6 ------ docs/changelog/104113.yaml | 5 ----- docs/changelog/104118.yaml | 6 ------ docs/changelog/104122.yaml | 5 ----- docs/changelog/104132.yaml | 5 ----- docs/changelog/104142.yaml | 5 ----- docs/changelog/104150.yaml | 5 ----- docs/changelog/104155.yaml | 6 ------ docs/changelog/104182.yaml | 5 ----- docs/changelog/104200.yaml | 5 ----- docs/changelog/104209.yaml | 13 ------------- docs/changelog/104218.yaml | 6 ------ docs/changelog/104227.yaml | 6 ------ docs/changelog/104230.yaml | 5 ----- docs/changelog/104265.yaml | 
6 ------ docs/changelog/104269.yaml | 5 ----- docs/changelog/104309.yaml | 5 ----- docs/changelog/104320.yaml | 5 ----- docs/changelog/104334.yaml | 5 ----- docs/changelog/104355.yaml | 5 ----- docs/changelog/104356.yaml | 5 ----- docs/changelog/104363.yaml | 5 ----- docs/changelog/104386.yaml | 6 ------ docs/changelog/104387.yaml | 6 ------ docs/changelog/104394.yaml | 5 ----- docs/changelog/104396.yaml | 5 ----- docs/changelog/104406.yaml | 5 ----- docs/changelog/104407.yaml | 6 ------ docs/changelog/104408.yaml | 5 ----- docs/changelog/104433.yaml | 5 ----- docs/changelog/104440.yaml | 6 ------ docs/changelog/104460.yaml | 5 ----- docs/changelog/104483.yaml | 5 ----- docs/changelog/104500.yaml | 5 ----- docs/changelog/104505.yaml | 5 ----- docs/changelog/104529.yaml | 5 ----- docs/changelog/104553.yaml | 5 ----- docs/changelog/104559.yaml | 5 ----- docs/changelog/104573.yaml | 5 ----- docs/changelog/104574.yaml | 10 ---------- docs/changelog/104575.yaml | 5 ----- docs/changelog/104581.yaml | 6 ------ docs/changelog/104594.yaml | 5 ----- docs/changelog/104614.yaml | 6 ------ docs/changelog/104625.yaml | 6 ------ docs/changelog/104636.yaml | 5 ----- docs/changelog/104643.yaml | 5 ----- docs/changelog/104648.yaml | 5 ----- docs/changelog/104654.yaml | 5 ----- docs/changelog/104665.yaml | 5 ----- docs/changelog/104666.yaml | 5 ----- docs/changelog/104674.yaml | 5 ----- docs/changelog/104718.yaml | 6 ------ docs/changelog/104721.yaml | 6 ------ docs/changelog/104730.yaml | 5 ----- docs/changelog/104750.yaml | 5 ----- docs/changelog/104753.yaml | 5 ----- docs/changelog/104778.yaml | 5 ----- docs/changelog/104784.yaml | 5 ----- docs/changelog/104787.yaml | 5 ----- docs/changelog/104796.yaml | 5 ----- docs/changelog/104840.yaml | 5 ----- docs/changelog/104859.yaml | 5 ----- docs/changelog/104872.yaml | 5 ----- docs/changelog/104878.yaml | 5 ----- docs/changelog/104893.yaml | 5 ----- docs/changelog/104895.yaml | 5 ----- docs/changelog/104905.yaml | 6 ------ 
docs/changelog/104909.yaml | 5 ----- docs/changelog/104911.yaml | 7 ------- docs/changelog/104927.yaml | 5 ----- docs/changelog/104936.yaml | 5 ----- docs/changelog/104943.yaml | 5 ----- docs/changelog/104949.yaml | 5 ----- docs/changelog/104958.yaml | 5 ----- docs/changelog/104982.yaml | 5 ----- docs/changelog/104993.yaml | 5 ----- docs/changelog/104996.yaml | 5 ----- docs/changelog/105015.yaml | 5 ----- docs/changelog/105044.yaml | 5 ----- docs/changelog/105055.yaml | 5 ----- docs/changelog/105062.yaml | 5 ----- docs/changelog/105064.yaml | 17 ----------------- docs/changelog/105081.yaml | 6 ------ docs/changelog/105088.yaml | 5 ----- docs/changelog/105089.yaml | 6 ------ docs/changelog/105103.yaml | 5 ----- docs/changelog/105105.yaml | 5 ----- docs/changelog/105131.yaml | 5 ----- docs/changelog/105150.yaml | 5 ----- docs/changelog/105163.yaml | 5 ----- docs/changelog/105164.yaml | 6 ------ docs/changelog/105178.yaml | 5 ----- docs/changelog/105180.yaml | 5 ----- docs/changelog/105192.yaml | 6 ------ docs/changelog/105196.yaml | 6 ------ docs/changelog/105207.yaml | 6 ------ docs/changelog/105221.yaml | 14 -------------- docs/changelog/105223.yaml | 5 ----- docs/changelog/105232.yaml | 6 ------ docs/changelog/105249.yaml | 5 ----- docs/changelog/105259.yaml | 5 ----- docs/changelog/105265.yaml | 5 ----- docs/changelog/105269.yaml | 5 ----- docs/changelog/105272.yaml | 5 ----- docs/changelog/105273.yaml | 6 ------ docs/changelog/105289.yaml | 5 ----- docs/changelog/105299.yaml | 6 ------ docs/changelog/105325.yaml | 6 ------ docs/changelog/105334.yaml | 6 ------ docs/changelog/105346.yaml | 5 ----- docs/changelog/105371.yaml | 5 ----- docs/changelog/105373.yaml | 5 ----- docs/changelog/105391.yaml | 5 ----- docs/changelog/105403.yaml | 6 ------ docs/changelog/105427.yaml | 5 ----- docs/changelog/105428.yaml | 5 ----- docs/changelog/105429.yaml | 5 ----- docs/changelog/105440.yaml | 6 ------ docs/changelog/105442.yaml | 6 ------ docs/changelog/105458.yaml | 5 ----- 
docs/changelog/105468.yaml | 5 ----- docs/changelog/105476.yaml | 6 ------ docs/changelog/105486.yaml | 5 ----- docs/changelog/105499.yaml | 5 ----- docs/changelog/105546.yaml | 6 ------ docs/changelog/105578.yaml | 13 ------------- docs/changelog/105588.yaml | 5 ----- docs/changelog/105593.yaml | 5 ----- docs/changelog/105633.yaml | 6 ------ docs/changelog/105650.yaml | 6 ------ docs/changelog/105691.yaml | 5 ----- docs/changelog/105770.yaml | 5 ----- docs/changelog/105772.yaml | 5 ----- docs/changelog/105789.yaml | 6 ------ docs/changelog/105848.yaml | 5 ----- docs/changelog/105941.yaml | 5 ----- docs/changelog/105945.yaml | 5 ----- docs/changelog/105987.yaml | 6 ------ docs/changelog/105994.yaml | 5 ----- docs/changelog/106020.yaml | 5 ----- docs/changelog/106057.yaml | 5 ----- docs/changelog/106060.yaml | 5 ----- docs/changelog/106062.yaml | 6 ------ docs/changelog/106105.yaml | 5 ----- docs/changelog/106156.yaml | 6 ------ docs/changelog/106288.yaml | 5 ----- docs/changelog/106329.yaml | 5 ----- docs/changelog/106351.yaml | 6 ------ docs/changelog/106392.yaml | 6 ------ docs/changelog/106398.yaml | 6 ------ docs/changelog/106544.yaml | 6 ------ docs/changelog/106574.yaml | 6 ------ docs/changelog/96235.yaml | 5 ----- docs/changelog/99142.yaml | 6 ------ docs/changelog/99747.yaml | 19 ------------------- docs/changelog/99961.yaml | 6 ------ 257 files changed, 1445 deletions(-) delete mode 100644 docs/changelog/100740.yaml delete mode 100644 docs/changelog/100813.yaml delete mode 100644 docs/changelog/101209.yaml delete mode 100644 docs/changelog/101487.yaml delete mode 100644 docs/changelog/101640.yaml delete mode 100644 docs/changelog/101656.yaml delete mode 100644 docs/changelog/101717.yaml delete mode 100644 docs/changelog/101872.yaml delete mode 100644 docs/changelog/102078.yaml delete mode 100644 docs/changelog/102207.yaml delete mode 100644 docs/changelog/102371.yaml delete mode 100644 docs/changelog/102428.yaml delete mode 100644 
docs/changelog/102435.yaml delete mode 100644 docs/changelog/102557.yaml delete mode 100644 docs/changelog/102559.yaml delete mode 100644 docs/changelog/102584.yaml delete mode 100644 docs/changelog/102726.yaml delete mode 100644 docs/changelog/102759.yaml delete mode 100644 docs/changelog/102765.yaml delete mode 100644 docs/changelog/102782.yaml delete mode 100644 docs/changelog/102798.yaml delete mode 100644 docs/changelog/102824.yaml delete mode 100644 docs/changelog/102862.yaml delete mode 100644 docs/changelog/102879.yaml delete mode 100644 docs/changelog/102885.yaml delete mode 100644 docs/changelog/103025.yaml delete mode 100644 docs/changelog/103032.yaml delete mode 100644 docs/changelog/103033.yaml delete mode 100644 docs/changelog/103035.yaml delete mode 100644 docs/changelog/103084.yaml delete mode 100644 docs/changelog/103091.yaml delete mode 100644 docs/changelog/103099.yaml delete mode 100644 docs/changelog/103130.yaml delete mode 100644 docs/changelog/103160.yaml delete mode 100644 docs/changelog/103171.yaml delete mode 100644 docs/changelog/103176.yaml delete mode 100644 docs/changelog/103178.yaml delete mode 100644 docs/changelog/103190.yaml delete mode 100644 docs/changelog/103223.yaml delete mode 100644 docs/changelog/103232.yaml delete mode 100644 docs/changelog/103300.yaml delete mode 100644 docs/changelog/103309.yaml delete mode 100644 docs/changelog/103310.yaml delete mode 100644 docs/changelog/103316.yaml delete mode 100644 docs/changelog/103325.yaml delete mode 100644 docs/changelog/103340.yaml delete mode 100644 docs/changelog/103387.yaml delete mode 100644 docs/changelog/103398.yaml delete mode 100644 docs/changelog/103399.yaml delete mode 100644 docs/changelog/103434.yaml delete mode 100644 docs/changelog/103453.yaml delete mode 100644 docs/changelog/103461.yaml delete mode 100644 docs/changelog/103481.yaml delete mode 100644 docs/changelog/103510.yaml delete mode 100644 docs/changelog/103520.yaml delete mode 100644 
docs/changelog/103535.yaml delete mode 100644 docs/changelog/103538.yaml delete mode 100644 docs/changelog/103555.yaml delete mode 100644 docs/changelog/103592.yaml delete mode 100644 docs/changelog/103610.yaml delete mode 100644 docs/changelog/103627.yaml delete mode 100644 docs/changelog/103628.yaml delete mode 100644 docs/changelog/103632.yaml delete mode 100644 docs/changelog/103633.yaml delete mode 100644 docs/changelog/103643.yaml delete mode 100644 docs/changelog/103646.yaml delete mode 100644 docs/changelog/103648.yaml delete mode 100644 docs/changelog/103651.yaml delete mode 100644 docs/changelog/103656.yaml delete mode 100644 docs/changelog/103669.yaml delete mode 100644 docs/changelog/103673.yaml delete mode 100644 docs/changelog/103681.yaml delete mode 100644 docs/changelog/103682.yaml delete mode 100644 docs/changelog/103698.yaml delete mode 100644 docs/changelog/103710.yaml delete mode 100644 docs/changelog/103720.yaml delete mode 100644 docs/changelog/103727.yaml delete mode 100644 docs/changelog/103763.yaml delete mode 100644 docs/changelog/103783.yaml delete mode 100644 docs/changelog/103807.yaml delete mode 100644 docs/changelog/103821.yaml delete mode 100644 docs/changelog/103846.yaml delete mode 100644 docs/changelog/103898.yaml delete mode 100644 docs/changelog/103903.yaml delete mode 100644 docs/changelog/103920.yaml delete mode 100644 docs/changelog/103922.yaml delete mode 100644 docs/changelog/103928.yaml delete mode 100644 docs/changelog/103948.yaml delete mode 100644 docs/changelog/103949.yaml delete mode 100644 docs/changelog/103959.yaml delete mode 100644 docs/changelog/103973.yaml delete mode 100644 docs/changelog/103996.yaml delete mode 100644 docs/changelog/104006.yaml delete mode 100644 docs/changelog/104030.yaml delete mode 100644 docs/changelog/104033.yaml delete mode 100644 docs/changelog/104043.yaml delete mode 100644 docs/changelog/104063.yaml delete mode 100644 docs/changelog/104077.yaml delete mode 100644 
docs/changelog/104091.yaml delete mode 100644 docs/changelog/104092.yaml delete mode 100644 docs/changelog/104099.yaml delete mode 100644 docs/changelog/104113.yaml delete mode 100644 docs/changelog/104118.yaml delete mode 100644 docs/changelog/104122.yaml delete mode 100644 docs/changelog/104132.yaml delete mode 100644 docs/changelog/104142.yaml delete mode 100644 docs/changelog/104150.yaml delete mode 100644 docs/changelog/104155.yaml delete mode 100644 docs/changelog/104182.yaml delete mode 100644 docs/changelog/104200.yaml delete mode 100644 docs/changelog/104209.yaml delete mode 100644 docs/changelog/104218.yaml delete mode 100644 docs/changelog/104227.yaml delete mode 100644 docs/changelog/104230.yaml delete mode 100644 docs/changelog/104265.yaml delete mode 100644 docs/changelog/104269.yaml delete mode 100644 docs/changelog/104309.yaml delete mode 100644 docs/changelog/104320.yaml delete mode 100644 docs/changelog/104334.yaml delete mode 100644 docs/changelog/104355.yaml delete mode 100644 docs/changelog/104356.yaml delete mode 100644 docs/changelog/104363.yaml delete mode 100644 docs/changelog/104386.yaml delete mode 100644 docs/changelog/104387.yaml delete mode 100644 docs/changelog/104394.yaml delete mode 100644 docs/changelog/104396.yaml delete mode 100644 docs/changelog/104406.yaml delete mode 100644 docs/changelog/104407.yaml delete mode 100644 docs/changelog/104408.yaml delete mode 100644 docs/changelog/104433.yaml delete mode 100644 docs/changelog/104440.yaml delete mode 100644 docs/changelog/104460.yaml delete mode 100644 docs/changelog/104483.yaml delete mode 100644 docs/changelog/104500.yaml delete mode 100644 docs/changelog/104505.yaml delete mode 100644 docs/changelog/104529.yaml delete mode 100644 docs/changelog/104553.yaml delete mode 100644 docs/changelog/104559.yaml delete mode 100644 docs/changelog/104573.yaml delete mode 100644 docs/changelog/104574.yaml delete mode 100644 docs/changelog/104575.yaml delete mode 100644 
docs/changelog/104581.yaml delete mode 100644 docs/changelog/104594.yaml delete mode 100644 docs/changelog/104614.yaml delete mode 100644 docs/changelog/104625.yaml delete mode 100644 docs/changelog/104636.yaml delete mode 100644 docs/changelog/104643.yaml delete mode 100644 docs/changelog/104648.yaml delete mode 100644 docs/changelog/104654.yaml delete mode 100644 docs/changelog/104665.yaml delete mode 100644 docs/changelog/104666.yaml delete mode 100644 docs/changelog/104674.yaml delete mode 100644 docs/changelog/104718.yaml delete mode 100644 docs/changelog/104721.yaml delete mode 100644 docs/changelog/104730.yaml delete mode 100644 docs/changelog/104750.yaml delete mode 100644 docs/changelog/104753.yaml delete mode 100644 docs/changelog/104778.yaml delete mode 100644 docs/changelog/104784.yaml delete mode 100644 docs/changelog/104787.yaml delete mode 100644 docs/changelog/104796.yaml delete mode 100644 docs/changelog/104840.yaml delete mode 100644 docs/changelog/104859.yaml delete mode 100644 docs/changelog/104872.yaml delete mode 100644 docs/changelog/104878.yaml delete mode 100644 docs/changelog/104893.yaml delete mode 100644 docs/changelog/104895.yaml delete mode 100644 docs/changelog/104905.yaml delete mode 100644 docs/changelog/104909.yaml delete mode 100644 docs/changelog/104911.yaml delete mode 100644 docs/changelog/104927.yaml delete mode 100644 docs/changelog/104936.yaml delete mode 100644 docs/changelog/104943.yaml delete mode 100644 docs/changelog/104949.yaml delete mode 100644 docs/changelog/104958.yaml delete mode 100644 docs/changelog/104982.yaml delete mode 100644 docs/changelog/104993.yaml delete mode 100644 docs/changelog/104996.yaml delete mode 100644 docs/changelog/105015.yaml delete mode 100644 docs/changelog/105044.yaml delete mode 100644 docs/changelog/105055.yaml delete mode 100644 docs/changelog/105062.yaml delete mode 100644 docs/changelog/105064.yaml delete mode 100644 docs/changelog/105081.yaml delete mode 100644 
docs/changelog/105088.yaml delete mode 100644 docs/changelog/105089.yaml delete mode 100644 docs/changelog/105103.yaml delete mode 100644 docs/changelog/105105.yaml delete mode 100644 docs/changelog/105131.yaml delete mode 100644 docs/changelog/105150.yaml delete mode 100644 docs/changelog/105163.yaml delete mode 100644 docs/changelog/105164.yaml delete mode 100644 docs/changelog/105178.yaml delete mode 100644 docs/changelog/105180.yaml delete mode 100644 docs/changelog/105192.yaml delete mode 100644 docs/changelog/105196.yaml delete mode 100644 docs/changelog/105207.yaml delete mode 100644 docs/changelog/105221.yaml delete mode 100644 docs/changelog/105223.yaml delete mode 100644 docs/changelog/105232.yaml delete mode 100644 docs/changelog/105249.yaml delete mode 100644 docs/changelog/105259.yaml delete mode 100644 docs/changelog/105265.yaml delete mode 100644 docs/changelog/105269.yaml delete mode 100644 docs/changelog/105272.yaml delete mode 100644 docs/changelog/105273.yaml delete mode 100644 docs/changelog/105289.yaml delete mode 100644 docs/changelog/105299.yaml delete mode 100644 docs/changelog/105325.yaml delete mode 100644 docs/changelog/105334.yaml delete mode 100644 docs/changelog/105346.yaml delete mode 100644 docs/changelog/105371.yaml delete mode 100644 docs/changelog/105373.yaml delete mode 100644 docs/changelog/105391.yaml delete mode 100644 docs/changelog/105403.yaml delete mode 100644 docs/changelog/105427.yaml delete mode 100644 docs/changelog/105428.yaml delete mode 100644 docs/changelog/105429.yaml delete mode 100644 docs/changelog/105440.yaml delete mode 100644 docs/changelog/105442.yaml delete mode 100644 docs/changelog/105458.yaml delete mode 100644 docs/changelog/105468.yaml delete mode 100644 docs/changelog/105476.yaml delete mode 100644 docs/changelog/105486.yaml delete mode 100644 docs/changelog/105499.yaml delete mode 100644 docs/changelog/105546.yaml delete mode 100644 docs/changelog/105578.yaml delete mode 100644 
docs/changelog/105588.yaml delete mode 100644 docs/changelog/105593.yaml delete mode 100644 docs/changelog/105633.yaml delete mode 100644 docs/changelog/105650.yaml delete mode 100644 docs/changelog/105691.yaml delete mode 100644 docs/changelog/105770.yaml delete mode 100644 docs/changelog/105772.yaml delete mode 100644 docs/changelog/105789.yaml delete mode 100644 docs/changelog/105848.yaml delete mode 100644 docs/changelog/105941.yaml delete mode 100644 docs/changelog/105945.yaml delete mode 100644 docs/changelog/105987.yaml delete mode 100644 docs/changelog/105994.yaml delete mode 100644 docs/changelog/106020.yaml delete mode 100644 docs/changelog/106057.yaml delete mode 100644 docs/changelog/106060.yaml delete mode 100644 docs/changelog/106062.yaml delete mode 100644 docs/changelog/106105.yaml delete mode 100644 docs/changelog/106156.yaml delete mode 100644 docs/changelog/106288.yaml delete mode 100644 docs/changelog/106329.yaml delete mode 100644 docs/changelog/106351.yaml delete mode 100644 docs/changelog/106392.yaml delete mode 100644 docs/changelog/106398.yaml delete mode 100644 docs/changelog/106544.yaml delete mode 100644 docs/changelog/106574.yaml delete mode 100644 docs/changelog/96235.yaml delete mode 100644 docs/changelog/99142.yaml delete mode 100644 docs/changelog/99747.yaml delete mode 100644 docs/changelog/99961.yaml diff --git a/docs/changelog/100740.yaml b/docs/changelog/100740.yaml deleted file mode 100644 index c93fbf676ef81..0000000000000 --- a/docs/changelog/100740.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100740 -summary: "ESQL: Referencing expressions that contain backticks requires <>." 
-area: ES|QL -type: enhancement -issues: - - 100312 diff --git a/docs/changelog/100813.yaml b/docs/changelog/100813.yaml deleted file mode 100644 index 476098b62c106..0000000000000 --- a/docs/changelog/100813.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 100813 -summary: Make `ParentTaskAssigningClient.getRemoteClusterClient` method also return - `ParentTaskAssigningClient` -area: Infra/Transport API -type: enhancement -issues: [] diff --git a/docs/changelog/101209.yaml b/docs/changelog/101209.yaml deleted file mode 100644 index debec27e61307..0000000000000 --- a/docs/changelog/101209.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101209 -summary: "Making `k` and `num_candidates` optional for knn search" -area: Vector Search -type: enhancement -issues: - - 97533 diff --git a/docs/changelog/101487.yaml b/docs/changelog/101487.yaml deleted file mode 100644 index b4531f7fd6f75..0000000000000 --- a/docs/changelog/101487.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101487 -summary: Wait for async searches to finish when shutting down -area: Infra/Node Lifecycle -type: enhancement -issues: [] diff --git a/docs/changelog/101640.yaml b/docs/changelog/101640.yaml deleted file mode 100644 index 6f61a3a3ffd84..0000000000000 --- a/docs/changelog/101640.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101640 -summary: Support cross clusters query in ESQL -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/101656.yaml b/docs/changelog/101656.yaml deleted file mode 100644 index 7cd4f30cae849..0000000000000 --- a/docs/changelog/101656.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101656 -summary: Adjust interception of requests for specific shard IDs -area: Authorization -type: bug -issues: [] diff --git a/docs/changelog/101717.yaml b/docs/changelog/101717.yaml deleted file mode 100644 index 7e97ef1049f88..0000000000000 --- a/docs/changelog/101717.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 101717 -summary: Pause shard snapshots on graceful shutdown -area: Snapshot/Restore -type: enhancement 
-issues: [] diff --git a/docs/changelog/101872.yaml b/docs/changelog/101872.yaml deleted file mode 100644 index 1c63c2d8b009a..0000000000000 --- a/docs/changelog/101872.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 101872 -summary: "Add `require_data_stream` parameter to indexing requests to enforce indexing operations target a data stream" -area: Data streams -type: feature -issues: - - 97032 diff --git a/docs/changelog/102078.yaml b/docs/changelog/102078.yaml deleted file mode 100644 index d031aa0dbf6f7..0000000000000 --- a/docs/changelog/102078.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102078 -summary: Derive expected replica size from primary -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/102207.yaml b/docs/changelog/102207.yaml deleted file mode 100644 index 8b247828845f4..0000000000000 --- a/docs/changelog/102207.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102207 -summary: Fix disk computation when initializing unassigned shards in desired balance - computation -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/102371.yaml b/docs/changelog/102371.yaml deleted file mode 100644 index 5a698bc9d671a..0000000000000 --- a/docs/changelog/102371.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102371 -summary: Adding threadpool metrics -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/102428.yaml b/docs/changelog/102428.yaml deleted file mode 100644 index 275492fa6a888..0000000000000 --- a/docs/changelog/102428.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102428 -summary: "ESQL: Add option to drop null fields" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/102435.yaml b/docs/changelog/102435.yaml deleted file mode 100644 index e8905b08f1adc..0000000000000 --- a/docs/changelog/102435.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102435 -summary: S3 first byte latency metric -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102557.yaml b/docs/changelog/102557.yaml deleted file 
mode 100644 index dfca1763064d4..0000000000000 --- a/docs/changelog/102557.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102557 -summary: Metrics for search latencies -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102559.yaml b/docs/changelog/102559.yaml deleted file mode 100644 index ad0867ab087b9..0000000000000 --- a/docs/changelog/102559.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102559 -summary: "Prune unnecessary information from TransportNodesStatsAction.NodeStatsRequest" -area: Network -type: enhancement -issues: [100878] diff --git a/docs/changelog/102584.yaml b/docs/changelog/102584.yaml deleted file mode 100644 index 44ff5dd9f7461..0000000000000 --- a/docs/changelog/102584.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102584 -summary: Expose some ML metrics via APM -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102726.yaml b/docs/changelog/102726.yaml deleted file mode 100644 index bc5b311481123..0000000000000 --- a/docs/changelog/102726.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102726 -summary: Resolve Cluster API -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/102759.yaml b/docs/changelog/102759.yaml deleted file mode 100644 index 1c002ef2b678e..0000000000000 --- a/docs/changelog/102759.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102759 -summary: Close rather than stop `HttpServerTransport` on shutdown -area: Infra/Node Lifecycle -type: bug -issues: - - 102501 diff --git a/docs/changelog/102765.yaml b/docs/changelog/102765.yaml deleted file mode 100644 index eb73da2650542..0000000000000 --- a/docs/changelog/102765.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102765 -summary: "Add APM metrics to `HealthPeriodicLogger`" -area: Health -type: enhancement -issues: [] diff --git a/docs/changelog/102782.yaml b/docs/changelog/102782.yaml deleted file mode 100644 index ed0a004765859..0000000000000 --- a/docs/changelog/102782.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102782 -summary: Upgrade to Lucene 
9.9.0 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/102798.yaml b/docs/changelog/102798.yaml deleted file mode 100644 index 986ad99f96a19..0000000000000 --- a/docs/changelog/102798.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102798 -summary: Hot-reloadable remote cluster credentials -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/102824.yaml b/docs/changelog/102824.yaml deleted file mode 100644 index 21b39a4c3999d..0000000000000 --- a/docs/changelog/102824.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102824 -summary: Change detection aggregation improvements -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/102862.yaml b/docs/changelog/102862.yaml deleted file mode 100644 index bb453163009d5..0000000000000 --- a/docs/changelog/102862.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102862 -summary: Add optional pruning configuration (weighted terms scoring) to text expansion query -area: "Machine Learning" -type: enhancement -issues: [] diff --git a/docs/changelog/102879.yaml b/docs/changelog/102879.yaml deleted file mode 100644 index b35d36dd0a3a9..0000000000000 --- a/docs/changelog/102879.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102879 -summary: Fix disk computation when initializing new shards -area: Allocation -type: bug -issues: [] diff --git a/docs/changelog/102885.yaml b/docs/changelog/102885.yaml deleted file mode 100644 index 7a998c3eb1f66..0000000000000 --- a/docs/changelog/102885.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102885 -summary: Make field limit more predictable -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/103025.yaml b/docs/changelog/103025.yaml deleted file mode 100644 index 856a7c022d5dd..0000000000000 --- a/docs/changelog/103025.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103025 -summary: "Metrics: Allow `AsyncCounters` to switch providers" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/103032.yaml b/docs/changelog/103032.yaml deleted 
file mode 100644 index 81d84fca0bdb0..0000000000000 --- a/docs/changelog/103032.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103032 -summary: "x-pack/plugin/apm-data: Map some APM fields as flattened and fix error.grouping_name script" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/103033.yaml b/docs/changelog/103033.yaml deleted file mode 100644 index 30f8e182b9998..0000000000000 --- a/docs/changelog/103033.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103033 -summary: "X-pack/plugin/core: rename `double_metrics` template" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/103035.yaml b/docs/changelog/103035.yaml deleted file mode 100644 index 5b1c9d6629767..0000000000000 --- a/docs/changelog/103035.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103035 -summary: "x-pack/plugin/core: add `match_mapping_type` to `ecs@mappings` dynamic templates" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/103084.yaml b/docs/changelog/103084.yaml deleted file mode 100644 index fb5a718a086de..0000000000000 --- a/docs/changelog/103084.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103084 -summary: Return `matched_queries` in Percolator -area: Percolator -type: enhancement -issues: - - 10163 diff --git a/docs/changelog/103091.yaml b/docs/changelog/103091.yaml deleted file mode 100644 index ae4ac11933d4e..0000000000000 --- a/docs/changelog/103091.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103091 -summary: "Metrics: Handle null observations in observers" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/103099.yaml b/docs/changelog/103099.yaml deleted file mode 100644 index c3fd3f9d7b8e4..0000000000000 --- a/docs/changelog/103099.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103099 -summary: "ESQL: Simpify IS NULL/IS NOT NULL evaluation" -area: ES|QL -type: enhancement -issues: - - 103097 diff --git a/docs/changelog/103130.yaml b/docs/changelog/103130.yaml deleted file mode 100644 index 3ef56ae84d123..0000000000000 
--- a/docs/changelog/103130.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103130 -summary: Create a DSL health indicator as part of the health API -area: Health -type: feature -issues: [] diff --git a/docs/changelog/103160.yaml b/docs/changelog/103160.yaml deleted file mode 100644 index 7701aa2b4a8d4..0000000000000 --- a/docs/changelog/103160.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103160 -summary: Set thread name used by REST client -area: Java Low Level REST Client -type: enhancement -issues: [] diff --git a/docs/changelog/103171.yaml b/docs/changelog/103171.yaml deleted file mode 100644 index 95ad6a1ea77c2..0000000000000 --- a/docs/changelog/103171.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 103171 -summary: "Add `unmatch_mapping_type`, and support array of types" -area: Mapping -type: feature -issues: - - 102807 - - 102795 diff --git a/docs/changelog/103176.yaml b/docs/changelog/103176.yaml deleted file mode 100644 index a0f46c1462f62..0000000000000 --- a/docs/changelog/103176.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103176 -summary: Validate settings in `ReloadSecureSettings` API -area: Client -type: bug -issues: [] diff --git a/docs/changelog/103178.yaml b/docs/changelog/103178.yaml deleted file mode 100644 index 5da0221a68984..0000000000000 --- a/docs/changelog/103178.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103178 -summary: Expose API key authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/103190.yaml b/docs/changelog/103190.yaml deleted file mode 100644 index 5e6927d3eadd7..0000000000000 --- a/docs/changelog/103190.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103190 -summary: ILM/SLM history policies forcemerge in hot and dsl configuration -area: ILM+SLM -type: enhancement -issues: [] diff --git a/docs/changelog/103223.yaml b/docs/changelog/103223.yaml deleted file mode 100644 index c2f4c1b6a2cf4..0000000000000 --- a/docs/changelog/103223.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 103223 -summary: "[Synonyms] Mark Synonyms 
as GA" -area: "Search" -type: feature -issues: [] -highlight: - title: "GA Release of Synonyms API" - body: |- - Removes the beta label for the Synonyms API to make it GA. - notable: true diff --git a/docs/changelog/103232.yaml b/docs/changelog/103232.yaml deleted file mode 100644 index b955e7abb7683..0000000000000 --- a/docs/changelog/103232.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103232 -summary: "Remove leniency in msearch parsing" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/103300.yaml b/docs/changelog/103300.yaml deleted file mode 100644 index a536a673b7827..0000000000000 --- a/docs/changelog/103300.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103300 -summary: Retry indefinitely for s3 indices blob read errors -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/103309.yaml b/docs/changelog/103309.yaml deleted file mode 100644 index 94b2a31127870..0000000000000 --- a/docs/changelog/103309.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103309 -summary: Introduce lazy rollover for mapping updates in data streams -area: Data streams -type: enhancement -issues: - - 89346 diff --git a/docs/changelog/103310.yaml b/docs/changelog/103310.yaml deleted file mode 100644 index a7a0746b6b8c4..0000000000000 --- a/docs/changelog/103310.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103310 -summary: Revert "Validate settings in `ReloadSecureSettings` API" -area: Security -type: bug -issues: [] diff --git a/docs/changelog/103316.yaml b/docs/changelog/103316.yaml deleted file mode 100644 index 47eddcc34d924..0000000000000 --- a/docs/changelog/103316.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103316 -summary: Review KEEP logic to prevent duplicate column names -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/103325.yaml b/docs/changelog/103325.yaml deleted file mode 100644 index 7de6c41986490..0000000000000 --- a/docs/changelog/103325.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103325 -summary: Added Duplicate Word Check Feature to 
Analysis Nori -area: Search -type: feature -issues: - - 103321 diff --git a/docs/changelog/103340.yaml b/docs/changelog/103340.yaml deleted file mode 100644 index 21280dbfc857d..0000000000000 --- a/docs/changelog/103340.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103340 -summary: Avoid humongous blocks -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103387.yaml b/docs/changelog/103387.yaml deleted file mode 100644 index 77239fb9a3778..0000000000000 --- a/docs/changelog/103387.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103387 -summary: Upgrade to Lucene 9.9.1 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/103398.yaml b/docs/changelog/103398.yaml deleted file mode 100644 index 69452616ddc99..0000000000000 --- a/docs/changelog/103398.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103398 -summary: ES|QL Async Query API -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103399.yaml b/docs/changelog/103399.yaml deleted file mode 100644 index 440ac90b313f5..0000000000000 --- a/docs/changelog/103399.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103399 -summary: "add validation on _id field when upsert new doc" -area: Search -type: bug -issues: - - 102981 diff --git a/docs/changelog/103434.yaml b/docs/changelog/103434.yaml deleted file mode 100644 index 56af604fe08f7..0000000000000 --- a/docs/changelog/103434.yaml +++ /dev/null @@ -1,11 +0,0 @@ -pr: 103434 -summary: Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours. -area: TSDB -type: breaking -issues: [] -breaking: - title: Lower the `look_ahead_time` index setting's max value - area: Index setting - details: "Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours." 
- impact: "Any value between 2 hours and 7 days will be as a look ahead time of 2 hours is defined" - notable: false diff --git a/docs/changelog/103453.yaml b/docs/changelog/103453.yaml deleted file mode 100644 index 4b7dab77c8b23..0000000000000 --- a/docs/changelog/103453.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103453 -summary: Add expiration time to update api key api -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103461.yaml b/docs/changelog/103461.yaml deleted file mode 100644 index 3a1bf30aa90c9..0000000000000 --- a/docs/changelog/103461.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103461 -summary: Add support for Well Known Binary (WKB) in the fields API for spatial fields -area: Geo -type: enhancement -issues: [] diff --git a/docs/changelog/103481.yaml b/docs/changelog/103481.yaml deleted file mode 100644 index f7c7c0b6eecc9..0000000000000 --- a/docs/changelog/103481.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103481 -summary: Redirect failed ingest node operations to a failure store when available -area: Data streams -type: feature -issues: [] diff --git a/docs/changelog/103510.yaml b/docs/changelog/103510.yaml deleted file mode 100644 index 50ec8efd5c440..0000000000000 --- a/docs/changelog/103510.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103510 -summary: "ES|QL: better management of exact subfields for TEXT fields" -area: ES|QL -type: bug -issues: - - 99899 diff --git a/docs/changelog/103520.yaml b/docs/changelog/103520.yaml deleted file mode 100644 index 0ef7124eb1ed2..0000000000000 --- a/docs/changelog/103520.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103520 -summary: Request indexing memory pressure in APM node metrics publisher -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/103535.yaml b/docs/changelog/103535.yaml deleted file mode 100644 index 80cf6e1ea709a..0000000000000 --- a/docs/changelog/103535.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103535 -summary: Add replay diagnostic dir to system jvm options -area: 
Infra/CLI -type: enhancement -issues: [] diff --git a/docs/changelog/103538.yaml b/docs/changelog/103538.yaml deleted file mode 100644 index 5aaed771d5ee4..0000000000000 --- a/docs/changelog/103538.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103538 -summary: "ESQL: Improve pushdown of certain filters" -area: ES|QL -type: bug -issues: - - 103536 diff --git a/docs/changelog/103555.yaml b/docs/changelog/103555.yaml deleted file mode 100644 index 2b0dc2692e252..0000000000000 --- a/docs/changelog/103555.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103555 -summary: "[Security Solution] Allow write permission for `kibana_system` role on endpoint\ - \ response index" -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/103592.yaml b/docs/changelog/103592.yaml deleted file mode 100644 index 21e06f1f5a10d..0000000000000 --- a/docs/changelog/103592.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103592 -summary: Remove deprecated Block APIs -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103610.yaml b/docs/changelog/103610.yaml deleted file mode 100644 index 1ed38cc2822bd..0000000000000 --- a/docs/changelog/103610.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103610 -summary: "ESQL: allow `null` in date math" -area: ES|QL -type: bug -issues: - - 103085 diff --git a/docs/changelog/103627.yaml b/docs/changelog/103627.yaml deleted file mode 100644 index 4b0d9e937542e..0000000000000 --- a/docs/changelog/103627.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103627 -summary: Add gradle tasks and code to modify and access mappings between version ids and release versions -area: Infra/Core -type: feature -issues: [] diff --git a/docs/changelog/103628.yaml b/docs/changelog/103628.yaml deleted file mode 100644 index 42259c7bcde46..0000000000000 --- a/docs/changelog/103628.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103628 -summary: Add ES|QL async delete API -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103632.yaml b/docs/changelog/103632.yaml 
deleted file mode 100644 index 1d83c6528f371..0000000000000 --- a/docs/changelog/103632.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103632 -summary: "ESQL: Check field exists before load from `_source`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103633.yaml b/docs/changelog/103633.yaml deleted file mode 100644 index 9e36451caafd8..0000000000000 --- a/docs/changelog/103633.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103633 -summary: Update s3 latency metric to use micros -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/103643.yaml b/docs/changelog/103643.yaml deleted file mode 100644 index 966fb57acf566..0000000000000 --- a/docs/changelog/103643.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103643 -summary: "[Profiling] Use shard request cache consistently" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/103646.yaml b/docs/changelog/103646.yaml deleted file mode 100644 index b7a6fae025771..0000000000000 --- a/docs/changelog/103646.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103646 -summary: Add index mapping parameter for `counted_keyword` -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/103648.yaml b/docs/changelog/103648.yaml deleted file mode 100644 index d4fa489a6812c..0000000000000 --- a/docs/changelog/103648.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103648 -summary: Introduce experimental pass-through field type -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/103651.yaml b/docs/changelog/103651.yaml deleted file mode 100644 index 1106044b31fd2..0000000000000 --- a/docs/changelog/103651.yaml +++ /dev/null @@ -1,12 +0,0 @@ -pr: 103651 -summary: Flag in `_field_caps` to return only fields with values in index -area: Search -type: enhancement -issues: [] -highlight: - title: Flag in `_field_caps` to return only fields with values in index - body: |- - We added support for filtering the field capabilities API output by removing - fields that don't 
have a value. This can be done through the newly added - `include_empty_fields` parameter, which defaults to true. - notable: true diff --git a/docs/changelog/103656.yaml b/docs/changelog/103656.yaml deleted file mode 100644 index 24bd8814029ff..0000000000000 --- a/docs/changelog/103656.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103656 -summary: "ESQL: add =~ operator (case insensitive equality)" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/103669.yaml b/docs/changelog/103669.yaml deleted file mode 100644 index 57361b9d842e4..0000000000000 --- a/docs/changelog/103669.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103669 -summary: Validate inference model ids -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/103673.yaml b/docs/changelog/103673.yaml deleted file mode 100644 index f786b57eba411..0000000000000 --- a/docs/changelog/103673.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103673 -summary: "ESQL: Infer not null for aggregated fields" -area: ES|QL -type: enhancement -issues: - - 102787 diff --git a/docs/changelog/103681.yaml b/docs/changelog/103681.yaml deleted file mode 100644 index bba73c8e3a7d4..0000000000000 --- a/docs/changelog/103681.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103681 -summary: "ESQL: Expand shallow copy with vecs" -area: ES|QL -type: enhancement -issues: - - 100528 diff --git a/docs/changelog/103682.yaml b/docs/changelog/103682.yaml deleted file mode 100644 index 109e77dd053a5..0000000000000 --- a/docs/changelog/103682.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103682 -summary: Use deduced mappings for determining proper fields' format even if `deduce_mappings==false` -area: Transform -type: bug -issues: - - 103115 diff --git a/docs/changelog/103698.yaml b/docs/changelog/103698.yaml deleted file mode 100644 index d94b70b54e505..0000000000000 --- a/docs/changelog/103698.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103698 -summary: Reading points from source to reduce precision loss -area: ES|QL -type: 
enhancement -issues: [] diff --git a/docs/changelog/103710.yaml b/docs/changelog/103710.yaml deleted file mode 100644 index 539b9f553ccc2..0000000000000 --- a/docs/changelog/103710.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103710 -summary: List hidden shard stores by default -area: Store -type: enhancement -issues: [] diff --git a/docs/changelog/103720.yaml b/docs/changelog/103720.yaml deleted file mode 100644 index e0ee879988fa7..0000000000000 --- a/docs/changelog/103720.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103720 -summary: Add "step":"ERROR" to ILM explain response for missing policy -area: ILM+SLM -type: enhancement -issues: - - 99030 diff --git a/docs/changelog/103727.yaml b/docs/changelog/103727.yaml deleted file mode 100644 index f943ee7906d58..0000000000000 --- a/docs/changelog/103727.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103727 -summary: "ESQL: Track the rest of `DocVector`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103763.yaml b/docs/changelog/103763.yaml deleted file mode 100644 index e4d6556c77077..0000000000000 --- a/docs/changelog/103763.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103763 -summary: Ref count search response bytes -area: Search -type: enhancement -issues: - - 102657 diff --git a/docs/changelog/103783.yaml b/docs/changelog/103783.yaml deleted file mode 100644 index 47c32dd639310..0000000000000 --- a/docs/changelog/103783.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103783 -summary: "[Profiling] Mark all templates as managed" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/103807.yaml b/docs/changelog/103807.yaml deleted file mode 100644 index 3849edcc00ced..0000000000000 --- a/docs/changelog/103807.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103807 -summary: "ESQL: Add single value checks on LIKE/RLIKE pushdown" -area: ES|QL -type: bug -issues: - - 103806 diff --git a/docs/changelog/103821.yaml b/docs/changelog/103821.yaml deleted file mode 100644 index 3279059acbe3e..0000000000000 --- 
a/docs/changelog/103821.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103821 -summary: "ESQL: Delay finding field load infrastructure" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103846.yaml b/docs/changelog/103846.yaml deleted file mode 100644 index 0d34efabc0278..0000000000000 --- a/docs/changelog/103846.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103846 -summary: Support sampling in `counted_terms` aggregation -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/103898.yaml b/docs/changelog/103898.yaml deleted file mode 100644 index 73d89e49e8812..0000000000000 --- a/docs/changelog/103898.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 103898 -summary: Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. -area: TSDB -type: breaking -issues: [] -breaking: - title: Change `index.look_ahead_time` index setting's default value from 2 hours to 30 minutes. - area: Index setting - details: Lower the `index.look_ahead_time` index setting's max value from 2 hours to 30 minutes. - impact: > - Documents with @timestamp of 30 minutes or more in the future will be rejected. - Before documents with @timestamp of 2 hours or more in the future were rejected. - If the previous behaviour should be kept, then update the `index.look_ahead_time` setting to two hours before performing the upgrade. 
- notable: false diff --git a/docs/changelog/103903.yaml b/docs/changelog/103903.yaml deleted file mode 100644 index c2e5e710ac439..0000000000000 --- a/docs/changelog/103903.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103903 -summary: Account for reserved disk size -area: Allocation -type: enhancement -issues: [] diff --git a/docs/changelog/103920.yaml b/docs/changelog/103920.yaml deleted file mode 100644 index c4a0d3b06fc82..0000000000000 --- a/docs/changelog/103920.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103920 -summary: Use search to determine if cluster contains data -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103922.yaml b/docs/changelog/103922.yaml deleted file mode 100644 index 4181a6e6b1e8a..0000000000000 --- a/docs/changelog/103922.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103922 -summary: Always test for spikes and dips as well as changes in the change point aggregation -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/103928.yaml b/docs/changelog/103928.yaml deleted file mode 100644 index a9e60ba33a686..0000000000000 --- a/docs/changelog/103928.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103928 -summary: "ESQL: `MV_FIRST` and `MV_LAST`" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/103948.yaml b/docs/changelog/103948.yaml deleted file mode 100644 index 3247183fc97bb..0000000000000 --- a/docs/changelog/103948.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103948 -summary: '''elasticsearch-certutil cert'' now verifies the issuing chain of the generated - certificate' -area: TLS -type: enhancement -issues: [] diff --git a/docs/changelog/103949.yaml b/docs/changelog/103949.yaml deleted file mode 100644 index 96bd76d89ceae..0000000000000 --- a/docs/changelog/103949.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103949 -summary: "ESQL: Introduce mode setting for ENRICH" -area: ES|QL -type: feature -issues: [] diff --git a/docs/changelog/103959.yaml b/docs/changelog/103959.yaml deleted file mode 
100644 index 4c8b4413b95f8..0000000000000 --- a/docs/changelog/103959.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103959 -summary: Add `ApiKey` expiration time to audit log -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103973.yaml b/docs/changelog/103973.yaml deleted file mode 100644 index f3bde76c7a559..0000000000000 --- a/docs/changelog/103973.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103973 -summary: Add stricter validation for api key expiration time -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/103996.yaml b/docs/changelog/103996.yaml deleted file mode 100644 index 699b93fff4f03..0000000000000 --- a/docs/changelog/103996.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103996 -summary: Ensure unique IDs between inference models and trained model deployments -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104006.yaml b/docs/changelog/104006.yaml deleted file mode 100644 index d840502cdefbe..0000000000000 --- a/docs/changelog/104006.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104006 -summary: Add support for more than one `inner_hit` when searching nested vectors -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/104030.yaml b/docs/changelog/104030.yaml deleted file mode 100644 index 8fe30e6258653..0000000000000 --- a/docs/changelog/104030.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104030 -summary: Add the possibility to transform WKT to WKB directly -area: Geo -type: enhancement -issues: [] diff --git a/docs/changelog/104033.yaml b/docs/changelog/104033.yaml deleted file mode 100644 index d3e167665732c..0000000000000 --- a/docs/changelog/104033.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104033 -summary: Add Query Users API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104043.yaml b/docs/changelog/104043.yaml deleted file mode 100644 index 86032e52fe208..0000000000000 --- a/docs/changelog/104043.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 
104043 -summary: Expose service account authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104063.yaml b/docs/changelog/104063.yaml deleted file mode 100644 index 5f59022472c75..0000000000000 --- a/docs/changelog/104063.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104063 -summary: Add serverless scopes for Connector APIs -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104077.yaml b/docs/changelog/104077.yaml deleted file mode 100644 index 7550e7388a29d..0000000000000 --- a/docs/changelog/104077.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104077 -summary: Retry updates to model snapshot ID on job config -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104091.yaml b/docs/changelog/104091.yaml deleted file mode 100644 index 42609e42471f8..0000000000000 --- a/docs/changelog/104091.yaml +++ /dev/null @@ -1,11 +0,0 @@ -pr: 104091 -summary: "[ESQL] Remove is_nan, is_finite, and `is_infinite`" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "[ESQL] Remove is_nan, is_finite, and `is_infinite`" - area: REST API - details: Removes the functions `is_nan`, `is_finite`, and `is_infinite`. - impact: Attempting to use the above functions will now be a planner time error. These functions are no longer supported. 
- notable: false diff --git a/docs/changelog/104092.yaml b/docs/changelog/104092.yaml deleted file mode 100644 index b40637d51765e..0000000000000 --- a/docs/changelog/104092.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104092 -summary: Ingest geoip processor cache 'no results' from the database -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104099.yaml b/docs/changelog/104099.yaml deleted file mode 100644 index b4164896a5923..0000000000000 --- a/docs/changelog/104099.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104099 -summary: Fix `require_alias` implicit true value on presence -area: Indices APIs -type: bug -issues: - - 103945 diff --git a/docs/changelog/104113.yaml b/docs/changelog/104113.yaml deleted file mode 100644 index 3068291606578..0000000000000 --- a/docs/changelog/104113.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104113 -summary: "X-pack/plugin/apm-data: fix `@custom` pipeline support" -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/104118.yaml b/docs/changelog/104118.yaml deleted file mode 100644 index f5afb199bc5eb..0000000000000 --- a/docs/changelog/104118.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104118 -summary: "ESQL: add `date_diff` function" -area: ES|QL -type: enhancement -issues: - - 101942 diff --git a/docs/changelog/104122.yaml b/docs/changelog/104122.yaml deleted file mode 100644 index a88d7499bd44e..0000000000000 --- a/docs/changelog/104122.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104122 -summary: Consider currently refreshing data in the memory usage of refresh -area: Engine -type: bug -issues: [] diff --git a/docs/changelog/104132.yaml b/docs/changelog/104132.yaml deleted file mode 100644 index 87fe94ddcfcea..0000000000000 --- a/docs/changelog/104132.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104132 -summary: Add support for the `simple_query_string` to the Query API Key API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104142.yaml b/docs/changelog/104142.yaml deleted file 
mode 100644 index 08bf9ef759090..0000000000000 --- a/docs/changelog/104142.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104142 -summary: Expose token authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104150.yaml b/docs/changelog/104150.yaml deleted file mode 100644 index c910542dcf7f6..0000000000000 --- a/docs/changelog/104150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104150 -summary: Correct profiled rewrite time for knn with a pre-filter -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104155.yaml b/docs/changelog/104155.yaml deleted file mode 100644 index 04d6a9920310a..0000000000000 --- a/docs/changelog/104155.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104155 -summary: "Updated `missingTrainedModel` message to include: you may need to create\ - \ it" -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104182.yaml b/docs/changelog/104182.yaml deleted file mode 100644 index b5cf10f941cc6..0000000000000 --- a/docs/changelog/104182.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104182 -summary: "Apm-data: fix `@custom` component templates" -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/104200.yaml b/docs/changelog/104200.yaml deleted file mode 100644 index bc2aa2507f0ec..0000000000000 --- a/docs/changelog/104200.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104200 -summary: Expose realms authentication metrics -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104209.yaml b/docs/changelog/104209.yaml deleted file mode 100644 index fabf06fb99c2e..0000000000000 --- a/docs/changelog/104209.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 104209 -summary: '`DesiredNode:` deprecate `node_version` field and make it optional (unused) - in current parser' -area: Distributed -type: deprecation -issues: [] -deprecation: - title: '`DesiredNode:` deprecate `node_version` field and make it optional for the current version' - area: REST API - details: The 
desired_node API includes a `node_version` field to perform validation on the new node version required. - This kind of check is too broad, and it's better done by external logic, so it has been removed, making the - `node_version` field not necessary. The field will be removed in a later version. - impact: Users should update their usages of `desired_node` to not include the `node_version` field anymore. diff --git a/docs/changelog/104218.yaml b/docs/changelog/104218.yaml deleted file mode 100644 index b3051008dc47b..0000000000000 --- a/docs/changelog/104218.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104218 -summary: "Support ST_CENTROID over spatial points" -area: "ES|QL" -type: enhancement -issues: - - 104656 diff --git a/docs/changelog/104227.yaml b/docs/changelog/104227.yaml deleted file mode 100644 index 64dcf844f23f2..0000000000000 --- a/docs/changelog/104227.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104227 -summary: Avoid wrapping searchers multiple times in mget -area: CRUD -type: enhancement -issues: - - 85069 diff --git a/docs/changelog/104230.yaml b/docs/changelog/104230.yaml deleted file mode 100644 index 94184f64586f5..0000000000000 --- a/docs/changelog/104230.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104230 -summary: Undeploy elser when inference model deleted -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104265.yaml b/docs/changelog/104265.yaml deleted file mode 100644 index 88c3d72ee81d0..0000000000000 --- a/docs/changelog/104265.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104265 -summary: Remove `hashCode` and `equals` from `OperationModeUpdateTask` -area: ILM+SLM -type: bug -issues: - - 100871 diff --git a/docs/changelog/104269.yaml b/docs/changelog/104269.yaml deleted file mode 100644 index 8d4b0fc5d5198..0000000000000 --- a/docs/changelog/104269.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104269 -summary: "ESQL: Support loading shapes from source into WKB blocks" -area: "ES|QL" -type: enhancement -issues: [] diff --git 
a/docs/changelog/104309.yaml b/docs/changelog/104309.yaml deleted file mode 100644 index 4467eb6722afc..0000000000000 --- a/docs/changelog/104309.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104309 -summary: "ESQL: Add TO_UPPER and TO_LOWER functions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104320.yaml b/docs/changelog/104320.yaml deleted file mode 100644 index d2b0d09070fb9..0000000000000 --- a/docs/changelog/104320.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104320 -summary: Hot-reloadable LDAP bind password -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104334.yaml b/docs/changelog/104334.yaml deleted file mode 100644 index ff242ee15141b..0000000000000 --- a/docs/changelog/104334.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104334 -summary: Automatically download the ELSER model when PUT in `_inference` -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104355.yaml b/docs/changelog/104355.yaml deleted file mode 100644 index 2a100faf3c35f..0000000000000 --- a/docs/changelog/104355.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104355 -summary: Prepare enrich plan to support multi clusters -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104356.yaml b/docs/changelog/104356.yaml deleted file mode 100644 index e0cb2311fbfc9..0000000000000 --- a/docs/changelog/104356.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104356 -summary: "[Profiling] Extract properties faster from source" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104363.yaml b/docs/changelog/104363.yaml deleted file mode 100644 index 9d97991ea7fab..0000000000000 --- a/docs/changelog/104363.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104363 -summary: Apply windowing and chunking to long documents -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104386.yaml b/docs/changelog/104386.yaml deleted file mode 100644 index 41b6a17424bbd..0000000000000 --- 
a/docs/changelog/104386.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104386 -summary: "X-pack/plugin/apm-data: add dynamic setting for enabling template registry" -area: Data streams -type: enhancement -issues: - - 104385 diff --git a/docs/changelog/104387.yaml b/docs/changelog/104387.yaml deleted file mode 100644 index f10084d8c4b32..0000000000000 --- a/docs/changelog/104387.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104387 -summary: "ESQL: Nested expressions inside stats command" -area: ES|QL -type: enhancement -issues: - - 99828 diff --git a/docs/changelog/104394.yaml b/docs/changelog/104394.yaml deleted file mode 100644 index 39fbfc0c4ea28..0000000000000 --- a/docs/changelog/104394.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104394 -summary: Endpoint to find positions of Grok pattern matches -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104396.yaml b/docs/changelog/104396.yaml deleted file mode 100644 index 586fdc1b22624..0000000000000 --- a/docs/changelog/104396.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104396 -summary: Report current master in `PeerFinder` -area: Cluster Coordination -type: enhancement -issues: [] diff --git a/docs/changelog/104406.yaml b/docs/changelog/104406.yaml deleted file mode 100644 index d26ef664abc07..0000000000000 --- a/docs/changelog/104406.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104406 -summary: Support patch transport version from 8.12 -area: Downsampling -type: enhancement -issues: [] diff --git a/docs/changelog/104407.yaml b/docs/changelog/104407.yaml deleted file mode 100644 index 1ce6b6f97f580..0000000000000 --- a/docs/changelog/104407.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104407 -summary: Set read timeout for fetching IMDSv2 token -area: Discovery-Plugins -type: enhancement -issues: - - 104244 diff --git a/docs/changelog/104408.yaml b/docs/changelog/104408.yaml deleted file mode 100644 index 7303740168ea5..0000000000000 --- a/docs/changelog/104408.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104408 
-summary: Move `TransportTermsEnumAction` coordination off transport threads -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104433.yaml b/docs/changelog/104433.yaml deleted file mode 100644 index b3b292923e290..0000000000000 --- a/docs/changelog/104433.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104433 -summary: Added 3 automatic restarts for `pytorch_inference` processes which stop unexpectedly -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104440.yaml b/docs/changelog/104440.yaml deleted file mode 100644 index 4242b7786f05f..0000000000000 --- a/docs/changelog/104440.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104440 -summary: Fix write index resolution when an alias is pointing to a TSDS -area: Data streams -type: bug -issues: - - 104189 diff --git a/docs/changelog/104460.yaml b/docs/changelog/104460.yaml deleted file mode 100644 index c92acdd5cb8ad..0000000000000 --- a/docs/changelog/104460.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104460 -summary: Dyamically adjust node metrics cache expire -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/104483.yaml b/docs/changelog/104483.yaml deleted file mode 100644 index 99917b4e8e017..0000000000000 --- a/docs/changelog/104483.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104483 -summary: Make `task_type` optional in `_inference` APIs -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104500.yaml b/docs/changelog/104500.yaml deleted file mode 100644 index 61c45c6dde3cb..0000000000000 --- a/docs/changelog/104500.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104500 -summary: Thread pool metrics -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/104505.yaml b/docs/changelog/104505.yaml deleted file mode 100644 index 4d0c482a88d85..0000000000000 --- a/docs/changelog/104505.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104505 -summary: "Revert \"x-pack/plugin/apm-data: download geoip DB on pipeline creation\"" -area: 
Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/104529.yaml b/docs/changelog/104529.yaml deleted file mode 100644 index 5b223a0924d86..0000000000000 --- a/docs/changelog/104529.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104529 -summary: Add rest spec for Query User API -area: Client -type: enhancement -issues: [] diff --git a/docs/changelog/104553.yaml b/docs/changelog/104553.yaml deleted file mode 100644 index e1f5c974bd74e..0000000000000 --- a/docs/changelog/104553.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104553 -summary: "ESQL: Fix a bug loading unindexed text fields" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104559.yaml b/docs/changelog/104559.yaml deleted file mode 100644 index d6d030783c4cc..0000000000000 --- a/docs/changelog/104559.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104559 -summary: Adding support for Cohere inference service -area: Machine Learning -type: enhancement -issues: [] diff --git a/docs/changelog/104573.yaml b/docs/changelog/104573.yaml deleted file mode 100644 index a333bc3024772..0000000000000 --- a/docs/changelog/104573.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104573 -summary: Fix logger Strings.format calls -area: Distributed -type: bug -issues: [] diff --git a/docs/changelog/104574.yaml b/docs/changelog/104574.yaml deleted file mode 100644 index 68be002142fd9..0000000000000 --- a/docs/changelog/104574.yaml +++ /dev/null @@ -1,10 +0,0 @@ -pr: 104574 -summary: Deprecate `client.type` -area: Infra/Core -type: deprecation -issues: [] -deprecation: - title: Deprecate `client.type` - area: Cluster and node setting - details: The node setting `client.type` has been ignored since the node client was removed in 8.0. The setting is now deprecated and will be removed in a future release. 
- impact: Remove the `client.type` setting from `elasticsearch.yml` diff --git a/docs/changelog/104575.yaml b/docs/changelog/104575.yaml deleted file mode 100644 index ba17b705fca10..0000000000000 --- a/docs/changelog/104575.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104575 -summary: Introduce Alias.unwrap method -area: "Query Languages" -type: enhancement -issues: [] diff --git a/docs/changelog/104581.yaml b/docs/changelog/104581.yaml deleted file mode 100644 index 5f9b71acbfed7..0000000000000 --- a/docs/changelog/104581.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104581 -summary: Fix bogus assertion tripped by force-executed tasks -area: Infra/Core -type: bug -issues: - - 104580 diff --git a/docs/changelog/104594.yaml b/docs/changelog/104594.yaml deleted file mode 100644 index 7729eb028f68e..0000000000000 --- a/docs/changelog/104594.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104594 -summary: Support of `match` for the Query API Key API -area: Authentication -type: enhancement -issues: [] diff --git a/docs/changelog/104614.yaml b/docs/changelog/104614.yaml deleted file mode 100644 index 9b2c25a643825..0000000000000 --- a/docs/changelog/104614.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104614 -summary: Extend `repository_integrity` health indicator for unknown and invalid repos -area: Health -type: enhancement -issues: - - 103784 diff --git a/docs/changelog/104625.yaml b/docs/changelog/104625.yaml deleted file mode 100644 index 28951936107fb..0000000000000 --- a/docs/changelog/104625.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104625 -summary: "Add support for the `type` parameter, for sorting, to the Query API Key\ - \ API" -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104636.yaml b/docs/changelog/104636.yaml deleted file mode 100644 index d74682f2eba18..0000000000000 --- a/docs/changelog/104636.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104636 -summary: Modifying request builders -area: Ingest Node -type: enhancement -issues: [] diff --git 
a/docs/changelog/104643.yaml b/docs/changelog/104643.yaml deleted file mode 100644 index 5a09cd081b376..0000000000000 --- a/docs/changelog/104643.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104643 -summary: "[Connectors API] Implement update service type action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104648.yaml b/docs/changelog/104648.yaml deleted file mode 100644 index e8bb5fea392ac..0000000000000 --- a/docs/changelog/104648.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104648 -summary: "[Connector API] Implement update `index_name` action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104654.yaml b/docs/changelog/104654.yaml deleted file mode 100644 index 1d007ad39a854..0000000000000 --- a/docs/changelog/104654.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104654 -summary: "[Connectors API] Implement update native action endpoint" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104665.yaml b/docs/changelog/104665.yaml deleted file mode 100644 index a7043cbdc9dda..0000000000000 --- a/docs/changelog/104665.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104665 -summary: Restrict usage of certain aggregations when in sort order execution is required -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/104666.yaml b/docs/changelog/104666.yaml deleted file mode 100644 index 5009052bd5b0a..0000000000000 --- a/docs/changelog/104666.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104666 -summary: Require the name field for `inner_hits` for collapse -area: Search -type: bug -issues: [] diff --git a/docs/changelog/104674.yaml b/docs/changelog/104674.yaml deleted file mode 100644 index 12951488f89ce..0000000000000 --- a/docs/changelog/104674.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104674 -summary: "[Profiling] Speed up processing of stacktraces" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104718.yaml b/docs/changelog/104718.yaml deleted file mode 100644 
index ffe889bb28a3e..0000000000000 --- a/docs/changelog/104718.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104718 -summary: "ESQL: Fix replacement of nested expressions in aggs with multiple parameters" -area: ES|QL -type: bug -issues: - - 104706 diff --git a/docs/changelog/104721.yaml b/docs/changelog/104721.yaml deleted file mode 100644 index 3bfe8a21646c8..0000000000000 --- a/docs/changelog/104721.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104721 -summary: Add default rollover conditions to ILM explain API response -area: ILM+SLM -type: enhancement -issues: - - 103395 diff --git a/docs/changelog/104730.yaml b/docs/changelog/104730.yaml deleted file mode 100644 index fe5e2e157a004..0000000000000 --- a/docs/changelog/104730.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104730 -summary: "[Profiling] Support downsampling of generic events" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104750.yaml b/docs/changelog/104750.yaml deleted file mode 100644 index 948b19a5eaaa6..0000000000000 --- a/docs/changelog/104750.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104750 -summary: "[Connectors API] Implement connector status update action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104753.yaml b/docs/changelog/104753.yaml deleted file mode 100644 index f95fd3da44084..0000000000000 --- a/docs/changelog/104753.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104753 -summary: Upgrade to Lucene 9.9.2 -area: Search -type: upgrade -issues: [] diff --git a/docs/changelog/104778.yaml b/docs/changelog/104778.yaml deleted file mode 100644 index 7dae338efc09c..0000000000000 --- a/docs/changelog/104778.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104778 -summary: Adding a `RequestBuilder` interface -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/104784.yaml b/docs/changelog/104784.yaml deleted file mode 100644 index 3d60222c2aa19..0000000000000 --- a/docs/changelog/104784.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104784 
-summary: "Fix blob cache race, decay, time dependency" -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/104787.yaml b/docs/changelog/104787.yaml deleted file mode 100644 index 9c4ce688ce6ad..0000000000000 --- a/docs/changelog/104787.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104787 -summary: Add troubleshooting docs link to `PeerFinder` logs -area: Cluster Coordination -type: enhancement -issues: [] diff --git a/docs/changelog/104796.yaml b/docs/changelog/104796.yaml deleted file mode 100644 index a683f9ce22d49..0000000000000 --- a/docs/changelog/104796.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104796 -summary: "ESQL: Pre-allocate rows in TopNOperator" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104840.yaml b/docs/changelog/104840.yaml deleted file mode 100644 index 5b7d83a966dbc..0000000000000 --- a/docs/changelog/104840.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104840 -summary: Support enrich ANY mode in cross clusters query -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104859.yaml b/docs/changelog/104859.yaml deleted file mode 100644 index 55e5758e31ae2..0000000000000 --- a/docs/changelog/104859.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104859 -summary: ES - document observing with rejections -area: Infra/Core -type: enhancement -issues: [] diff --git a/docs/changelog/104872.yaml b/docs/changelog/104872.yaml deleted file mode 100644 index ad70946be02ae..0000000000000 --- a/docs/changelog/104872.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104872 -summary: Add new int8_flat and flat vector index types -area: Vector Search -type: enhancement -issues: [] diff --git a/docs/changelog/104878.yaml b/docs/changelog/104878.yaml deleted file mode 100644 index 2ae6d5c0c1da3..0000000000000 --- a/docs/changelog/104878.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104878 -summary: "Transforms: Adding basic stats API param" -area: Transform -type: enhancement -issues: [] diff --git 
a/docs/changelog/104893.yaml b/docs/changelog/104893.yaml deleted file mode 100644 index e4685e160f8f8..0000000000000 --- a/docs/changelog/104893.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104893 -summary: Release resources in `BestBucketsDeferringCollector` earlier -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/104895.yaml b/docs/changelog/104895.yaml deleted file mode 100644 index 020dcff891f03..0000000000000 --- a/docs/changelog/104895.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104895 -summary: Aggs support for Query API Key Information API -area: Security -type: enhancement -issues: [] diff --git a/docs/changelog/104905.yaml b/docs/changelog/104905.yaml deleted file mode 100644 index 80e06dc3b0cf5..0000000000000 --- a/docs/changelog/104905.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104905 -summary: "Execute lazy rollover with an internal dedicated user #104732" -area: Data streams -type: bug -issues: - - 104732 diff --git a/docs/changelog/104909.yaml b/docs/changelog/104909.yaml deleted file mode 100644 index 6d250c22a745a..0000000000000 --- a/docs/changelog/104909.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104909 -summary: "[Connectors API] Relax strict response parsing for get/list operations" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104911.yaml b/docs/changelog/104911.yaml deleted file mode 100644 index 17a335337e345..0000000000000 --- a/docs/changelog/104911.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 104911 -summary: "ES|QL: Improve type validation in aggs for UNSIGNED_LONG better support\ - \ for VERSION" -area: ES|QL -type: bug -issues: - - 102961 diff --git a/docs/changelog/104927.yaml b/docs/changelog/104927.yaml deleted file mode 100644 index e0e098ba10b7b..0000000000000 --- a/docs/changelog/104927.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104927 -summary: Adding `ActionRequestLazyBuilder` implementation of `RequestBuilder` -area: Ingest Node -type: enhancement -issues: [] diff --git 
a/docs/changelog/104936.yaml b/docs/changelog/104936.yaml deleted file mode 100644 index cfa170f550681..0000000000000 --- a/docs/changelog/104936.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104936 -summary: Support enrich coordinator mode -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104943.yaml b/docs/changelog/104943.yaml deleted file mode 100644 index 094ce66c4f994..0000000000000 --- a/docs/changelog/104943.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104943 -summary: Fix server cli to always pass through exit code -area: Infra/CLI -type: bug -issues: [] diff --git a/docs/changelog/104949.yaml b/docs/changelog/104949.yaml deleted file mode 100644 index c2682fc911f1d..0000000000000 --- a/docs/changelog/104949.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104949 -summary: Add text_embedding inference service with multilingual-e5 and custom eland models -area: Machine Learning -type: enhancement -issues: [ ] diff --git a/docs/changelog/104958.yaml b/docs/changelog/104958.yaml deleted file mode 100644 index 936342db03b45..0000000000000 --- a/docs/changelog/104958.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104958 -summary: "ESQL: Extend STATS command to support aggregate expressions" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104982.yaml b/docs/changelog/104982.yaml deleted file mode 100644 index 62194aa68b80c..0000000000000 --- a/docs/changelog/104982.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104982 -summary: "[Connectors API] Add new field `api_key_secret_id` to Connector" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/104993.yaml b/docs/changelog/104993.yaml deleted file mode 100644 index df9875563d5a1..0000000000000 --- a/docs/changelog/104993.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104993 -summary: Support enrich remote mode -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/104996.yaml b/docs/changelog/104996.yaml deleted file mode 100644 index b94711111adfe..0000000000000 
--- a/docs/changelog/104996.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104996 -summary: "Enhancement: Metrics for Search Took Times using Action Listeners" -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105015.yaml b/docs/changelog/105015.yaml deleted file mode 100644 index 94ffc2b0e58d5..0000000000000 --- a/docs/changelog/105015.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105015 -summary: Modify name of threadpool metric for rejected -area: Infra/Metrics -type: enhancement -issues: [] diff --git a/docs/changelog/105044.yaml b/docs/changelog/105044.yaml deleted file mode 100644 index 5a9a11f928f98..0000000000000 --- a/docs/changelog/105044.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105044 -summary: Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/105055.yaml b/docs/changelog/105055.yaml deleted file mode 100644 index 0db70a6b9e558..0000000000000 --- a/docs/changelog/105055.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105055 -summary: "Do not enable APM agent 'instrument', it's not required for manual tracing" -area: Infra/Core -type: bug -issues: [] diff --git a/docs/changelog/105062.yaml b/docs/changelog/105062.yaml deleted file mode 100644 index 928786f62381a..0000000000000 --- a/docs/changelog/105062.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105062 -summary: Nest pass-through objects within objects -area: TSDB -type: enhancement -issues: [] diff --git a/docs/changelog/105064.yaml b/docs/changelog/105064.yaml deleted file mode 100644 index 81c62b3148f1c..0000000000000 --- a/docs/changelog/105064.yaml +++ /dev/null @@ -1,17 +0,0 @@ -pr: 105064 -summary: "ES|QL: remove PROJECT keyword from the grammar" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "ES|QL: remove PROJECT keyword from the grammar" - area: REST API - details: "Removes the PROJECT keyword (an alias for KEEP) from ES|QL grammar" - impact: "Before this change, users could use PROJECT 
as an alias for KEEP in ESQL queries,\ - \ (eg. 'FROM idx | PROJECT name, surname')\ - \ the parser replaced PROJECT with KEEP, emitted a warning:\ - \ 'PROJECT command is no longer supported, please use KEEP instead'\ - \ and the query was executed normally.\ - \ With this change, PROJECT command is no longer recognized by the query parser;\ - \ queries using PROJECT command now return a parsing exception." - notable: false diff --git a/docs/changelog/105081.yaml b/docs/changelog/105081.yaml deleted file mode 100644 index efa686bd7b4a4..0000000000000 --- a/docs/changelog/105081.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105081 -summary: For empty mappings use a `LocalRelation` -area: ES|QL -type: bug -issues: - - 104809 diff --git a/docs/changelog/105088.yaml b/docs/changelog/105088.yaml deleted file mode 100644 index 8b5d1fa7f9e02..0000000000000 --- a/docs/changelog/105088.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105088 -summary: "ESQL: Speed up reading many nulls" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105089.yaml b/docs/changelog/105089.yaml deleted file mode 100644 index 6f43c58af8a41..0000000000000 --- a/docs/changelog/105089.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105089 -summary: Return results in order -area: Transform -type: bug -issues: - - 104847 diff --git a/docs/changelog/105103.yaml b/docs/changelog/105103.yaml deleted file mode 100644 index 599d2e3666e4b..0000000000000 --- a/docs/changelog/105103.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105103 -summary: Do not record s3 http request time when it is not available -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/105105.yaml b/docs/changelog/105105.yaml deleted file mode 100644 index 848a9637d1388..0000000000000 --- a/docs/changelog/105105.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105105 -summary: Add s3 `HeadObject` request to request stats -area: Snapshot/Restore -type: enhancement -issues: [] diff --git a/docs/changelog/105131.yaml 
b/docs/changelog/105131.yaml deleted file mode 100644 index 36993527da583..0000000000000 --- a/docs/changelog/105131.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105131 -summary: "[Connector API] Support filtering by name, index name in list action" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105150.yaml b/docs/changelog/105150.yaml deleted file mode 100644 index d9fc3d337f952..0000000000000 --- a/docs/changelog/105150.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105150 -summary: Remove `SearchException` usages without a proper status code -area: Search -type: bug -issues: [] diff --git a/docs/changelog/105163.yaml b/docs/changelog/105163.yaml deleted file mode 100644 index f28bf4de14792..0000000000000 --- a/docs/changelog/105163.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105163 -summary: Add stable `ThreadPool` constructor to `LogstashInternalBridge` -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/105164.yaml b/docs/changelog/105164.yaml deleted file mode 100644 index 7affb0911bc6d..0000000000000 --- a/docs/changelog/105164.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105164 -summary: Remove duplicate checkpoint audits -area: Transform -type: bug -issues: - - 105106 diff --git a/docs/changelog/105178.yaml b/docs/changelog/105178.yaml deleted file mode 100644 index e8fc9cfd6898f..0000000000000 --- a/docs/changelog/105178.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105178 -summary: "[Connector API] Support filtering connectors by service type and a query" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105180.yaml b/docs/changelog/105180.yaml deleted file mode 100644 index ac7ed20f151b7..0000000000000 --- a/docs/changelog/105180.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105180 -summary: Use new `ignore_dynamic_beyond_limit` in logs and metric data streams -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/105192.yaml b/docs/changelog/105192.yaml deleted file mode 100644 index 
b15d58ef40fe7..0000000000000 --- a/docs/changelog/105192.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105192 -summary: Allow transforms to use PIT with remote clusters again -area: Transform -type: enhancement -issues: - - 104518 diff --git a/docs/changelog/105196.yaml b/docs/changelog/105196.yaml deleted file mode 100644 index 8fe7b50cfa989..0000000000000 --- a/docs/changelog/105196.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105196 -summary: Adding a custom exception for problems with the graph of pipelines to be - applied to a document -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/105207.yaml b/docs/changelog/105207.yaml deleted file mode 100644 index 00d227248abfb..0000000000000 --- a/docs/changelog/105207.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105207 -summary: Introduce an `AggregatorReducer` to reduce the footprint of aggregations - in the coordinating node -area: Aggregations -type: enhancement -issues: [] diff --git a/docs/changelog/105221.yaml b/docs/changelog/105221.yaml deleted file mode 100644 index 2ef64ef110d95..0000000000000 --- a/docs/changelog/105221.yaml +++ /dev/null @@ -1,14 +0,0 @@ -pr: 105221 -summary: "ESQL: Grammar - FROM METADATA no longer requires []" -area: ES|QL -type: breaking -issues: [] -breaking: - title: "ESQL: Grammar - FROM METADATA no longer requires []" - area: REST API - details: "Remove [ ] for METADATA option inside FROM command statements" - impact: "Previously to return metadata fields, one had to use square brackets:\ - \ (eg. 'FROM index [METADATA _index]').\ - \ This is no longer needed: the [ ] are dropped and do not have to be specified,\ - \ thus simplifying the command above to:'FROM index METADATA _index'." 
- notable: false diff --git a/docs/changelog/105223.yaml b/docs/changelog/105223.yaml deleted file mode 100644 index e2a95fcd6ba48..0000000000000 --- a/docs/changelog/105223.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105223 -summary: "x-pack/plugin/apm-data: Add a new field transaction.profiler_stack_trace_ids to traces-apm@mappings.yaml" -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/105232.yaml b/docs/changelog/105232.yaml deleted file mode 100644 index a2ad7ad9451e9..0000000000000 --- a/docs/changelog/105232.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105232 -summary: Execute SAML authentication on the generic threadpool -area: Authentication -type: bug -issues: - - 104962 diff --git a/docs/changelog/105249.yaml b/docs/changelog/105249.yaml deleted file mode 100644 index 979253e452008..0000000000000 --- a/docs/changelog/105249.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105249 -summary: "[Connector API] Support updating configuration values only" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105259.yaml b/docs/changelog/105259.yaml deleted file mode 100644 index a360bc8bc1672..0000000000000 --- a/docs/changelog/105259.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105259 -summary: Lower G1 minimum full GC interval -area: Infra/Circuit Breakers -type: enhancement -issues: [] diff --git a/docs/changelog/105265.yaml b/docs/changelog/105265.yaml deleted file mode 100644 index 70231dbfabc52..0000000000000 --- a/docs/changelog/105265.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105265 -summary: Improving the performance of the ingest simulate verbose API -area: "Ingest Node" -type: enhancement -issues: [] diff --git a/docs/changelog/105269.yaml b/docs/changelog/105269.yaml deleted file mode 100644 index acf05b05ecfc4..0000000000000 --- a/docs/changelog/105269.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105269 -summary: Reserve bytes before serializing page -area: ES|QL -type: enhancement -issues: [] diff --git 
a/docs/changelog/105272.yaml b/docs/changelog/105272.yaml deleted file mode 100644 index 1032a17fc10f8..0000000000000 --- a/docs/changelog/105272.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105272 -summary: "Stop the periodic health logger when es is stopping" -area: Health -type: bug -issues: [] diff --git a/docs/changelog/105273.yaml b/docs/changelog/105273.yaml deleted file mode 100644 index 83db9eac2a14a..0000000000000 --- a/docs/changelog/105273.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105273 -summary: "x-pack/plugin/core: make automatic rollovers lazy" -area: Data streams -type: enhancement -issues: - - 104083 diff --git a/docs/changelog/105289.yaml b/docs/changelog/105289.yaml deleted file mode 100644 index a51778a93beb8..0000000000000 --- a/docs/changelog/105289.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105289 -summary: "[Connector API] Change required privileges to indices:data/read(write)" -area: Application -type: enhancement -issues: [] diff --git a/docs/changelog/105299.yaml b/docs/changelog/105299.yaml deleted file mode 100644 index b1f9b3ac4a2aa..0000000000000 --- a/docs/changelog/105299.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105299 -summary: Conditionally send the dimensions field as part of the openai requests -area: Machine Learning -type: enhancement -issues: - - 105005 diff --git a/docs/changelog/105325.yaml b/docs/changelog/105325.yaml deleted file mode 100644 index ab3724efca30f..0000000000000 --- a/docs/changelog/105325.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105325 -summary: "ESQL: Fix Analyzer to not interpret escaped * as a pattern" -area: ES|QL -type: bug -issues: - - 104955 diff --git a/docs/changelog/105334.yaml b/docs/changelog/105334.yaml deleted file mode 100644 index 498fdf4113b3c..0000000000000 --- a/docs/changelog/105334.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105334 -summary: Upgrade ANTLR4 to 4.13.1 -area: Query Languages -type: upgrade -issues: - - 102953 diff --git a/docs/changelog/105346.yaml b/docs/changelog/105346.yaml deleted 
file mode 100644 index 7c6eab93f6c10..0000000000000 --- a/docs/changelog/105346.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105346 -summary: Allow GET inference models by user a with read only permission -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105371.yaml b/docs/changelog/105371.yaml deleted file mode 100644 index 500c64b677a10..0000000000000 --- a/docs/changelog/105371.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105371 -summary: "ESQL: Add plan consistency verification after each optimizer" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105373.yaml b/docs/changelog/105373.yaml deleted file mode 100644 index f9d3c718f7ae3..0000000000000 --- a/docs/changelog/105373.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105373 -summary: "Fix parsing of flattened fields within subobjects: false" -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/105391.yaml b/docs/changelog/105391.yaml deleted file mode 100644 index 6b9b39c00a150..0000000000000 --- a/docs/changelog/105391.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105391 -summary: Catch all the potential exceptions in the ingest processor code -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105403.yaml b/docs/changelog/105403.yaml deleted file mode 100644 index f855c0e8ed94f..0000000000000 --- a/docs/changelog/105403.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105403 -summary: "ESQL: make `cidr_match` foldable" -area: ES|QL -type: bug -issues: - - 105376 diff --git a/docs/changelog/105427.yaml b/docs/changelog/105427.yaml deleted file mode 100644 index e73853b9dce92..0000000000000 --- a/docs/changelog/105427.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105427 -summary: Adding `executedPipelines` to the `IngestDocument` copy constructor -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/105428.yaml b/docs/changelog/105428.yaml deleted file mode 100644 index 49a80150b4303..0000000000000 --- a/docs/changelog/105428.yaml +++ /dev/null @@ 
-1,5 +0,0 @@ -pr: 105428 -summary: Limiting the number of nested pipelines that can be executed -area: Ingest Node -type: enhancement -issues: [] diff --git a/docs/changelog/105429.yaml b/docs/changelog/105429.yaml deleted file mode 100644 index 706375649b7ca..0000000000000 --- a/docs/changelog/105429.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105429 -summary: Changed system auditor to use levels -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105440.yaml b/docs/changelog/105440.yaml deleted file mode 100644 index 8aacac3e641bf..0000000000000 --- a/docs/changelog/105440.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105440 -summary: Avoid false-positive matches on intermediate objects in `ecs@mappings` -area: Data streams -type: bug -issues: - - 102794 diff --git a/docs/changelog/105442.yaml b/docs/changelog/105442.yaml deleted file mode 100644 index b0af1b634d984..0000000000000 --- a/docs/changelog/105442.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105442 -summary: Handling exceptions on watcher reload -area: Watcher -type: bug -issues: - - 69842 diff --git a/docs/changelog/105458.yaml b/docs/changelog/105458.yaml deleted file mode 100644 index 2bab415884975..0000000000000 --- a/docs/changelog/105458.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105458 -summary: The OpenAI model parameter should be in service settings not task settings. 
Move the configuration field to service settings -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/105468.yaml b/docs/changelog/105468.yaml deleted file mode 100644 index 0de36a71862a4..0000000000000 --- a/docs/changelog/105468.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105468 -summary: Include better output in profiling & `toString` for automaton based queries -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105476.yaml b/docs/changelog/105476.yaml deleted file mode 100644 index 6520df78520e7..0000000000000 --- a/docs/changelog/105476.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105476 -summary: "ESQL: Fix bug in grammar that allowed spaces inside id pattern" -area: ES|QL -type: bug -issues: - - 105441 diff --git a/docs/changelog/105486.yaml b/docs/changelog/105486.yaml deleted file mode 100644 index befdaec2301c6..0000000000000 --- a/docs/changelog/105486.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105486 -summary: Fix use-after-free at event-loop shutdown -area: Network -type: bug -issues: [] diff --git a/docs/changelog/105499.yaml b/docs/changelog/105499.yaml deleted file mode 100644 index bfc297411efa7..0000000000000 --- a/docs/changelog/105499.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105499 -summary: Fix a bug where destination index aliases are not set up for an unattended transform -area: Transform -type: bug -issues: [] diff --git a/docs/changelog/105546.yaml b/docs/changelog/105546.yaml deleted file mode 100644 index 0b54e124f2495..0000000000000 --- a/docs/changelog/105546.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105546 -summary: '`GlobalOrdCardinalityAggregator` should use `HyperLogLogPlusPlus` instead - of `HyperLogLogPlusPlusSparse`' -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/105578.yaml b/docs/changelog/105578.yaml deleted file mode 100644 index 1ffa0128c1d0a..0000000000000 --- a/docs/changelog/105578.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 105578 -summary: Upgrade to Lucene 9.10.0 -area: 
Search -type: enhancement -issues: [] -highlight: - title: New Lucene 9.10 release - body: |- - - https://github.com/apache/lucene/pull/13090: Prevent humongous allocations in ScalarQuantizer when building quantiles. - - https://github.com/apache/lucene/pull/12962: Speedup concurrent multi-segment HNSW graph search - - https://github.com/apache/lucene/pull/13033: Range queries on numeric/date/ip fields now exit earlier on segments whose values don't intersect with the query range. This should especially help when there are other required clauses in the `bool` query and when the range filter is narrow, e.g. filtering on the last 5 minutes. - - https://github.com/apache/lucene/pull/13026: `bool` queries that mix `filter` and `should` clauses will now propagate minimum competitive scores through the `should` clauses. This should yield speedups when sorting by descending score. - notable: true diff --git a/docs/changelog/105588.yaml b/docs/changelog/105588.yaml deleted file mode 100644 index e43ff8cd75c60..0000000000000 --- a/docs/changelog/105588.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105588 -summary: '`URLRepository` should not block shutdown' -area: Snapshot/Restore -type: bug -issues: [] diff --git a/docs/changelog/105593.yaml b/docs/changelog/105593.yaml deleted file mode 100644 index 4eef0d9404f42..0000000000000 --- a/docs/changelog/105593.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105593 -summary: "ESQL: push down \"[text_field] is not null\"" -area: ES|QL -type: enhancement -issues: [] diff --git a/docs/changelog/105633.yaml b/docs/changelog/105633.yaml deleted file mode 100644 index b19ec67f4602a..0000000000000 --- a/docs/changelog/105633.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105633 -summary: "[Connector API] Bugfix: support list type in filtering advenced snippet\ - \ value" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/105650.yaml b/docs/changelog/105650.yaml deleted file mode 100644 index f43da5b315f4c..0000000000000 --- 
a/docs/changelog/105650.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105650 -summary: "ESQL: Fix wrong attribute shadowing in pushdown rules" -area: ES|QL -type: bug -issues: - - 105434 diff --git a/docs/changelog/105691.yaml b/docs/changelog/105691.yaml deleted file mode 100644 index 89797782b06ee..0000000000000 --- a/docs/changelog/105691.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105691 -summary: "ES|QL: Disable optimizations that rely on Expression.nullable()" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/105770.yaml b/docs/changelog/105770.yaml deleted file mode 100644 index ec8ae4f380e2f..0000000000000 --- a/docs/changelog/105770.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105770 -summary: Field-caps field has value lookup use map instead of looping array -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105772.yaml b/docs/changelog/105772.yaml deleted file mode 100644 index 73680aa04e5ab..0000000000000 --- a/docs/changelog/105772.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105772 -summary: "[ILM] Delete step deletes data stream with only one index" -area: ILM+SLM -type: bug -issues: [] diff --git a/docs/changelog/105789.yaml b/docs/changelog/105789.yaml deleted file mode 100644 index 02a6936fa3294..0000000000000 --- a/docs/changelog/105789.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105789 -summary: Make Health API more resilient to multi-version clusters -area: Health -type: bug -issues: - - 90183 diff --git a/docs/changelog/105848.yaml b/docs/changelog/105848.yaml deleted file mode 100644 index 18291066177f6..0000000000000 --- a/docs/changelog/105848.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105848 -summary: '`ProjectOperator` should not retain references to released blocks' -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/105941.yaml b/docs/changelog/105941.yaml deleted file mode 100644 index 8e2eea1657208..0000000000000 --- a/docs/changelog/105941.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105941 -summary: Field caps 
performance pt2 -area: Search -type: enhancement -issues: [] diff --git a/docs/changelog/105945.yaml b/docs/changelog/105945.yaml deleted file mode 100644 index ec76faf6ef76f..0000000000000 --- a/docs/changelog/105945.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105945 -summary: "[Connector API] Fix default ordering in `SyncJob` list endpoint" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/105987.yaml b/docs/changelog/105987.yaml deleted file mode 100644 index d09a6907c72bf..0000000000000 --- a/docs/changelog/105987.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 105987 -summary: Fix `categorize_text` aggregation nested under empty buckets -area: Machine Learning -type: bug -issues: - - 105836 diff --git a/docs/changelog/105994.yaml b/docs/changelog/105994.yaml deleted file mode 100644 index ef9889d0a47af..0000000000000 --- a/docs/changelog/105994.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 105994 -summary: Fix bug when nested knn pre-filter might match nested docs -area: Vector Search -type: bug -issues: [] diff --git a/docs/changelog/106020.yaml b/docs/changelog/106020.yaml deleted file mode 100644 index 094a43b430f89..0000000000000 --- a/docs/changelog/106020.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106020 -summary: Fix resetting a job if the original reset task no longer exists. 
-area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106057.yaml b/docs/changelog/106057.yaml deleted file mode 100644 index c07f658fbbf8a..0000000000000 --- a/docs/changelog/106057.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106057 -summary: Avoid computing `currentInferenceProcessors` on every cluster state -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106060.yaml b/docs/changelog/106060.yaml deleted file mode 100644 index 2b6a47372ddd3..0000000000000 --- a/docs/changelog/106060.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106060 -summary: "[Connector API] Fix serialisation of script params in connector index service" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/106062.yaml b/docs/changelog/106062.yaml deleted file mode 100644 index f4ff3df4045e6..0000000000000 --- a/docs/changelog/106062.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106062 -summary: "During ML maintenance, reset jobs in the reset state without a corresponding\ - \ task" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/106105.yaml b/docs/changelog/106105.yaml deleted file mode 100644 index 09f80e9e71e6d..0000000000000 --- a/docs/changelog/106105.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106105 -summary: Respect --pass option in certutil csr mode -area: TLS -type: bug -issues: [] diff --git a/docs/changelog/106156.yaml b/docs/changelog/106156.yaml deleted file mode 100644 index 63232efe6e5fb..0000000000000 --- a/docs/changelog/106156.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106156 -summary: Disable parallel collection for terms aggregation with `min_doc_count` equals - to 0 -area: Aggregations -type: bug -issues: [] diff --git a/docs/changelog/106288.yaml b/docs/changelog/106288.yaml deleted file mode 100644 index 0f14e53c237a1..0000000000000 --- a/docs/changelog/106288.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106288 -summary: Small time series agg improvement -area: TSDB -type: enhancement -issues: [] diff --git 
a/docs/changelog/106329.yaml b/docs/changelog/106329.yaml deleted file mode 100644 index 78e811e7987b6..0000000000000 --- a/docs/changelog/106329.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106329 -summary: Fix Search Applications bug where deleting an alias before deleting an application intermittently caused errors -area: Application -type: bug -issues: [] diff --git a/docs/changelog/106351.yaml b/docs/changelog/106351.yaml deleted file mode 100644 index 45868acc3a284..0000000000000 --- a/docs/changelog/106351.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106351 -summary: "Fix error on sorting unsortable `geo_point` and `cartesian_point`" -area: ES|QL -type: bug -issues: - - 106007 diff --git a/docs/changelog/106392.yaml b/docs/changelog/106392.yaml deleted file mode 100644 index ff1a0284ee5db..0000000000000 --- a/docs/changelog/106392.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106392 -summary: Resume driver when failing to fetch pages -area: ES|QL -type: bug -issues: - - 106262 diff --git a/docs/changelog/106398.yaml b/docs/changelog/106398.yaml deleted file mode 100644 index cffc5ceeb214d..0000000000000 --- a/docs/changelog/106398.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106398 -summary: Release `TranslogSnapshot` buffer after iteration -area: Engine -type: bug -issues: - - 106390 diff --git a/docs/changelog/106544.yaml b/docs/changelog/106544.yaml deleted file mode 100644 index 6557ba478126d..0000000000000 --- a/docs/changelog/106544.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106544 -summary: Force execution of `SearchService.Reaper` -area: Search -type: bug -issues: - - 106543 diff --git a/docs/changelog/106574.yaml b/docs/changelog/106574.yaml deleted file mode 100644 index 8063450bc0db1..0000000000000 --- a/docs/changelog/106574.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 106574 -summary: Fix `_reset` API when called with `force=true` on a failed transform -area: Transform -type: bug -issues: - - 106573 diff --git a/docs/changelog/96235.yaml b/docs/changelog/96235.yaml deleted 
file mode 100644 index 83d1eaf74916b..0000000000000 --- a/docs/changelog/96235.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 96235 -summary: Add `index.mapping.total_fields.ignore_dynamic_beyond_limit` setting to ignore dynamic fields when field limit is reached -area: Mapping -type: enhancement -issues: [] diff --git a/docs/changelog/99142.yaml b/docs/changelog/99142.yaml deleted file mode 100644 index 885946cec909b..0000000000000 --- a/docs/changelog/99142.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99142 -summary: Reuse number field mapper tests in other modules -area: Search -type: enhancement -issues: - - 92947 diff --git a/docs/changelog/99747.yaml b/docs/changelog/99747.yaml deleted file mode 100644 index e3e6edc585ca6..0000000000000 --- a/docs/changelog/99747.yaml +++ /dev/null @@ -1,19 +0,0 @@ -pr: 99747 -summary: Improve storage efficiency for non-metric fields in TSDB -area: TSDB -type: enhancement -issues: [] -highlight: - title: Improve storage efficiency for non-metric fields in TSDB - body: |- - Adds a new `doc_values` encoding for non-metric fields in TSDB that takes advantage of TSDB's index sorting. - While terms that are used in multiple documents (such as the host name) are already stored only once in the terms dictionary, - there are a lot of repetitions in the references to the terms dictionary that are stored in `doc_values` (ordinals). - In TSDB, documents (and therefore `doc_values`) are implicitly sorted by dimenstions and timestamp. - This means that for each time series, we are storing long consecutive runs of the same ordinal. - With this change, we are introducing an encoding that detects and efficiently stores runs of the same value (such as `1 1 1 2 2 2 …`), - and runs of cycling values (such as `1 2 1 2 …`). - In our testing, we have seen a reduction in storage size by about 13%. - The effectiveness of this encoding depends on how many non-metric fields, such as dimensions, are used. 
- The more non-metric fields, the more effective this improvement will be. - notable: true diff --git a/docs/changelog/99961.yaml b/docs/changelog/99961.yaml deleted file mode 100644 index 457f7801ce218..0000000000000 --- a/docs/changelog/99961.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 99961 -summary: "ESQL: Correct out-of-range filter pushdowns" -area: ES|QL -type: bug -issues: - - 99960 From a49f8b863d6063fde17b6bdea7393f626c0d0d23 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 26 Mar 2024 16:19:03 -0400 Subject: [PATCH 190/214] Redirect shard-level bulk failures to a failure store if applicable (#105362) This PR expands upon previous work in the failure store by inspecting failed shard-level bulk operations and possibly redirecting them to a failure store. --- .../190_failure_store_redirection.yml | 80 ++ .../action/bulk/BulkOperation.java | 444 +++++++-- .../action/bulk/BulkRequestModifier.java | 4 +- ...ava => FailureStoreDocumentConverter.java} | 8 +- .../common/collect/Iterators.java | 101 +- .../action/bulk/BulkOperationTests.java | 870 ++++++++++++++++++ ...> FailureStoreDocumentConverterTests.java} | 11 +- .../common/collect/IteratorsTests.java | 25 + 8 files changed, 1467 insertions(+), 76 deletions(-) rename server/src/main/java/org/elasticsearch/action/bulk/{FailureStoreDocument.java => FailureStoreDocumentConverter.java} (94%) create mode 100644 server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java rename server/src/test/java/org/elasticsearch/action/bulk/{FailureStoreDocumentTests.java => FailureStoreDocumentConverterTests.java} (90%) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index b9621977ff3aa..f22267357104e 100644 --- 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -108,3 +108,83 @@ teardown: indices.delete: index: .fs-logs-foobar-* - is_true: acknowledged + +--- +"Redirect shard failure in data stream to failure store": + - skip: + version: " - 8.13.99" + reason: "data stream failure stores only redirect shard failures in 8.14+" + features: [allowed_warnings, contains] + + - do: + allowed_warnings: + - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" + indices.put_index_template: + name: generic_logs_template + body: + index_patterns: logs-* + data_stream: + failure_store: true + template: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + properties: + '@timestamp': + type: date + count: + type: long + + + - do: + index: + index: logs-foobar + refresh: true + body: + '@timestamp': '2020-12-12' + count: 'invalid value' + + - do: + indices.get_data_stream: + name: logs-foobar + - match: { data_streams.0.name: logs-foobar } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store: true } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + search: + index: logs-foobar + body: { query: { match_all: {} } } + - length: { hits.hits: 0 } + + - do: + search: + index: .fs-logs-foobar-* + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: 
"/\\.fs-logs-foobar-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - exists: hits.hits.0._source.@timestamp + - not_exists: hits.hits.0._source.count + - match: { hits.hits.0._source.document.index: 'logs-foobar' } + - match: { hits.hits.0._source.document.source.@timestamp: '2020-12-12' } + - match: { hits.hits.0._source.document.source.count: 'invalid value' } + - match: { hits.hits.0._source.error.type: 'document_parsing_exception' } + - contains: { hits.hits.0._source.error.message: "failed to parse field [count] of type [long] in document with id " } + - contains: { hits.hits.0._source.error.message: "Preview of field's value: 'invalid value'" } + - contains: { hits.hits.0._source.error.stack_trace: "org.elasticsearch.index.mapper.DocumentParsingException: " } + - contains: { hits.hits.0._source.error.stack_trace: "failed to parse field [count] of type [long] in document with id" } + - contains: { hits.hits.0._source.error.stack_trace: "Preview of field's value: 'invalid value'" } + + - do: + indices.delete_data_stream: + name: logs-foobar + - is_true: acknowledged + + - do: + indices.delete: + index: .fs-logs-foobar-* + - is_true: acknowledged diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index 1d95f430d5c7e..1e9b1446850af 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -16,18 +16,21 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.RoutingMissingException; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import 
org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRouting; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; @@ -39,11 +42,16 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.TimeUnit; +import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.LongSupplier; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; @@ -59,14 +67,16 @@ final class BulkOperation extends ActionRunnable { private final Task task; private final ThreadPool threadPool; private final ClusterService clusterService; - private BulkRequest bulkRequest; // set to null once all requests are sent out + private BulkRequest bulkRequest; // set to null once all requests are completed private final ActionListener listener; private final AtomicArray responses; + private final ConcurrentLinkedQueue failureStoreRedirects = new ConcurrentLinkedQueue<>(); private final long startTimeNanos; private final ClusterStateObserver observer; private final Map indicesThatCannotBeCreated; private final String executorName; private final LongSupplier 
relativeTimeProvider; + private final FailureStoreDocumentConverter failureStoreDocumentConverter; private IndexNameExpressionResolver indexNameExpressionResolver; private NodeClient client; @@ -83,6 +93,40 @@ final class BulkOperation extends ActionRunnable { LongSupplier relativeTimeProvider, long startTimeNanos, ActionListener listener + ) { + this( + task, + threadPool, + executorName, + clusterService, + bulkRequest, + client, + responses, + indicesThatCannotBeCreated, + indexNameExpressionResolver, + relativeTimeProvider, + startTimeNanos, + listener, + new ClusterStateObserver(clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext()), + new FailureStoreDocumentConverter() + ); + } + + BulkOperation( + Task task, + ThreadPool threadPool, + String executorName, + ClusterService clusterService, + BulkRequest bulkRequest, + NodeClient client, + AtomicArray responses, + Map indicesThatCannotBeCreated, + IndexNameExpressionResolver indexNameExpressionResolver, + LongSupplier relativeTimeProvider, + long startTimeNanos, + ActionListener listener, + ClusterStateObserver observer, + FailureStoreDocumentConverter failureStoreDocumentConverter ) { super(listener); this.task = task; @@ -97,68 +141,90 @@ final class BulkOperation extends ActionRunnable { this.relativeTimeProvider = relativeTimeProvider; this.indexNameExpressionResolver = indexNameExpressionResolver; this.client = client; - this.observer = new ClusterStateObserver(clusterService, bulkRequest.timeout(), logger, threadPool.getThreadContext()); + this.observer = observer; + this.failureStoreDocumentConverter = failureStoreDocumentConverter; } @Override protected void doRun() { assert bulkRequest != null; final ClusterState clusterState = observer.setAndGetObservedState(); - if (handleBlockExceptions(clusterState)) { + if (handleBlockExceptions(clusterState, BulkOperation.this, this::onFailure)) { + return; + } + Map> requestsByShard = groupBulkRequestsByShards(clusterState); + 
executeBulkRequestsByShard(requestsByShard, clusterState, this::redirectFailuresOrCompleteBulkOperation); + } + + private void doRedirectFailures() { + assert failureStoreRedirects.isEmpty() != true : "Attempting to redirect failures, but none were present in the queue"; + final ClusterState clusterState = observer.setAndGetObservedState(); + // If the cluster is blocked at this point, discard the failure store redirects and complete the response with the original failures + if (handleBlockExceptions(clusterState, ActionRunnable.run(listener, this::doRedirectFailures), this::discardRedirectsAndFinish)) { return; } - Map> requestsByShard = groupRequestsByShards(clusterState); - executeBulkRequestsByShard(requestsByShard, clusterState); + Map> requestsByShard = drainAndGroupRedirectsByShards(clusterState); + executeBulkRequestsByShard(requestsByShard, clusterState, this::completeBulkOperation); } private long buildTookInMillis(long startTimeNanos) { return TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTimeNanos); } - private Map> groupRequestsByShards(ClusterState clusterState) { + private Map> groupBulkRequestsByShards(ClusterState clusterState) { + return groupRequestsByShards( + clusterState, + Iterators.enumerate(bulkRequest.requests.iterator(), BulkItemRequest::new), + BulkOperation::validateWriteIndex + ); + } + + private Map> drainAndGroupRedirectsByShards(ClusterState clusterState) { + return groupRequestsByShards( + clusterState, + Iterators.fromSupplier(failureStoreRedirects::poll), + (ia, ignore) -> validateRedirectIndex(ia) + ); + } + + private Map> groupRequestsByShards( + ClusterState clusterState, + Iterator it, + BiConsumer> indexOperationValidator + ) { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); Metadata metadata = clusterState.metadata(); // Group the requests by ShardId -> Operations mapping Map> requestsByShard = new HashMap<>(); - for (int i = 0; i < 
bulkRequest.requests.size(); i++) { - DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); + while (it.hasNext()) { + BulkItemRequest bulkItemRequest = it.next(); + DocWriteRequest docWriteRequest = bulkItemRequest.request(); + // the request can only be null because we set it to null in the previous step, so it gets ignored if (docWriteRequest == null) { continue; } - if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, i, metadata)) { + if (addFailureIfRequiresAliasAndAliasIsMissing(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } - if (addFailureIfIndexCannotBeCreated(docWriteRequest, i)) { + if (addFailureIfIndexCannotBeCreated(docWriteRequest, bulkItemRequest.id())) { continue; } - if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, i, metadata)) { + if (addFailureIfRequiresDataStreamAndNoParentDataStream(docWriteRequest, bulkItemRequest.id(), metadata)) { continue; } IndexAbstraction ia = null; - boolean includeDataStreams = docWriteRequest.opType() == DocWriteRequest.OpType.CREATE; try { ia = concreteIndices.resolveIfAbsent(docWriteRequest); - if (ia.isDataStreamRelated() && includeDataStreams == false) { - throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); - } - // The ConcreteIndices#resolveIfAbsent(...) method validates via IndexNameExpressionResolver whether - // an operation is allowed in index into a data stream, but this isn't done when resolve call is cached, so - // the validation needs to be performed here too. 
- if (ia.getParentDataStream() != null && - // avoid valid cases when directly indexing into a backing index - // (for example when directly indexing into .ds-logs-foobar-000001) - ia.getName().equals(docWriteRequest.index()) == false && docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) { - throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); - } + indexOperationValidator.accept(ia, docWriteRequest); TransportBulkAction.prohibitCustomRoutingOnDataStream(docWriteRequest, metadata); TransportBulkAction.prohibitAppendWritesInBackingIndices(docWriteRequest, metadata); docWriteRequest.routing(metadata.resolveWriteIndexRouting(docWriteRequest.routing(), docWriteRequest.index())); final Index concreteIndex = docWriteRequest.getConcreteWriteIndex(ia, metadata); - if (addFailureIfIndexIsClosed(docWriteRequest, concreteIndex, i, metadata)) { + if (addFailureIfIndexIsClosed(docWriteRequest, concreteIndex, bulkItemRequest.id(), metadata)) { continue; } IndexRouting indexRouting = concreteIndices.routing(concreteIndex); @@ -168,37 +234,56 @@ private Map> groupRequestsByShards(ClusterState c new ShardId(concreteIndex, shardId), shard -> new ArrayList<>() ); - shardRequests.add(new BulkItemRequest(i, docWriteRequest)); + shardRequests.add(bulkItemRequest); } catch (ElasticsearchParseException | IllegalArgumentException | RoutingMissingException | ResourceNotFoundException e) { String name = ia != null ? 
ia.getName() : docWriteRequest.index(); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(name, docWriteRequest.id(), e); - BulkItemResponse bulkItemResponse = BulkItemResponse.failure(i, docWriteRequest.opType(), failure); - responses.set(i, bulkItemResponse); - // make sure the request gets never processed again - bulkRequest.requests.set(i, null); + addFailureAndDiscardRequest(docWriteRequest, bulkItemRequest.id(), name, e); } } return requestsByShard; } - private void executeBulkRequestsByShard(Map> requestsByShard, ClusterState clusterState) { + /** + * Validates that an index abstraction is capable of receiving the provided write request + */ + private static void validateWriteIndex(IndexAbstraction ia, DocWriteRequest docWriteRequest) { + boolean includeDataStreams = docWriteRequest.opType() == DocWriteRequest.OpType.CREATE; + if (ia.isDataStreamRelated() && includeDataStreams == false) { + throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); + } + // The ConcreteIndices#resolveIfAbsent(...) method validates via IndexNameExpressionResolver whether + // an operation is allowed in index into a data stream, but this isn't done when resolve call is cached, so + // the validation needs to be performed here too. 
+ if (ia.getParentDataStream() != null && + // avoid valid cases when directly indexing into a backing index + // (for example when directly indexing into .ds-logs-foobar-000001) + ia.getName().equals(docWriteRequest.index()) == false && docWriteRequest.opType() != DocWriteRequest.OpType.CREATE) { + throw new IllegalArgumentException("only write ops with an op_type of create are allowed in data streams"); + } + } + + /** + * Validates that an index abstraction is capable of receiving a failure store redirect + */ + private static void validateRedirectIndex(IndexAbstraction ia) { + if (ia.isDataStreamRelated() == false) { + // We should only be dealing with traffic targeting concrete data streams. + throw new IllegalArgumentException("only write ops to data streams with enabled failure stores can be redirected on failure."); + } + } + + private void executeBulkRequestsByShard( + Map> requestsByShard, + ClusterState clusterState, + Runnable onRequestsCompleted + ) { if (requestsByShard.isEmpty()) { - listener.onResponse( - new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) - ); + onRequestsCompleted.run(); return; } String nodeId = clusterService.localNode().getId(); - Runnable onBulkItemsComplete = () -> { - listener.onResponse( - new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) - ); - // Allow memory for bulk shard request items to be reclaimed before all items have been completed - bulkRequest = null; - }; - - try (RefCountingRunnable bulkItemRequestCompleteRefCount = new RefCountingRunnable(onBulkItemsComplete)) { + try (RefCountingRunnable bulkItemRequestCompleteRefCount = new RefCountingRunnable(onRequestsCompleted)) { for (Map.Entry> entry : requestsByShard.entrySet()) { final ShardId shardId = entry.getKey(); final List requests = entry.getValue(); @@ -219,18 +304,75 @@ private void executeBulkRequestsByShard(Map> requ } } + private 
void redirectFailuresOrCompleteBulkOperation() { + if (DataStream.isFailureStoreEnabled() && failureStoreRedirects.isEmpty() == false) { + doRedirectFailures(); + } else { + completeBulkOperation(); + } + } + + private void completeBulkOperation() { + listener.onResponse( + new BulkResponse(responses.toArray(new BulkItemResponse[responses.length()]), buildTookInMillis(startTimeNanos)) + ); + // Allow memory for bulk shard request items to be reclaimed before all items have been completed + bulkRequest = null; + } + + /** + * Discards all failure store redirections and completes the bulk request. + * @param exception any documents that could have been redirected will have this exception added as a suppressed exception + * on their original failure information. + */ + private void discardRedirectsAndFinish(Exception exception) { + assert failureStoreRedirects.isEmpty() != true : "Attempting to discard redirects, but there were none to discard"; + Iterator redirectedBulkItemIterator = Iterators.fromSupplier(failureStoreRedirects::poll); + while (redirectedBulkItemIterator.hasNext()) { + BulkItemRequest cancelledRedirectBulkItem = redirectedBulkItemIterator.next(); + int slot = cancelledRedirectBulkItem.id(); + BulkItemResponse originalFailure = responses.get(slot); + if (originalFailure.isFailed()) { + originalFailure.getFailure().getCause().addSuppressed(exception); + } + } + completeBulkOperation(); + } + private void executeBulkShardRequest(BulkShardRequest bulkShardRequest, Releasable releaseOnFinish) { client.executeLocally(TransportShardBulkAction.TYPE, bulkShardRequest, new ActionListener<>() { + + // Lazily get the cluster state to avoid keeping it around longer than it is needed + private ClusterState clusterState = null; + + private ClusterState getClusterState() { + if (clusterState == null) { + clusterState = clusterService.state(); + } + return clusterState; + } + @Override public void onResponse(BulkShardResponse bulkShardResponse) { - for 
(BulkItemResponse bulkItemResponse : bulkShardResponse.getResponses()) { - // we may have no response if item failed - if (bulkItemResponse.getResponse() != null) { + for (int idx = 0; idx < bulkShardResponse.getResponses().length; idx++) { + // We zip the requests and responses together so that we can identify failed documents and potentially store them + BulkItemResponse bulkItemResponse = bulkShardResponse.getResponses()[idx]; + + if (bulkItemResponse.isFailed()) { + BulkItemRequest bulkItemRequest = bulkShardRequest.items()[idx]; + assert bulkItemRequest.id() == bulkItemResponse.getItemId() : "Bulk items were returned out of order"; + + String failureStoreReference = getRedirectTarget(bulkItemRequest.request(), getClusterState().metadata()); + if (failureStoreReference != null) { + addDocumentToRedirectRequests(bulkItemRequest, bulkItemResponse.getFailure().getCause(), failureStoreReference); + } + addFailure(bulkItemResponse); + } else { bulkItemResponse.getResponse().setShardInfo(bulkShardResponse.getShardInfo()); + responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } - responses.set(bulkItemResponse.getItemId(), bulkItemResponse); } - releaseOnFinish.close(); + completeShardOperation(); } @Override @@ -239,33 +381,135 @@ public void onFailure(Exception e) { for (BulkItemRequest request : bulkShardRequest.items()) { final String indexName = request.index(); DocWriteRequest docWriteRequest = request.request(); - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(indexName, docWriteRequest.id(), e); - responses.set(request.id(), BulkItemResponse.failure(request.id(), docWriteRequest.opType(), failure)); + + String failureStoreReference = getRedirectTarget(docWriteRequest, getClusterState().metadata()); + if (failureStoreReference != null) { + addDocumentToRedirectRequests(request, e, failureStoreReference); + } + addFailure(docWriteRequest, request.id(), indexName, e); } + completeShardOperation(); + } + + private void 
completeShardOperation() { + // Clear our handle on the cluster state to allow it to be cleaned up + clusterState = null; releaseOnFinish.close(); } }); } - private boolean handleBlockExceptions(ClusterState state) { + /** + * Determines if the write request can be redirected if it fails. Write requests can be redirected IFF they are targeting a data stream + * with a failure store and are not already redirected themselves. If the document can be redirected, the data stream name to use for + * the redirection is returned. + * + * @param docWriteRequest the write request to check + * @param metadata cluster state metadata for resolving index abstractions + * @return a data stream name if the write request points to a data stream that has the failure store enabled, + * or {@code null} if it does + */ + private static String getRedirectTarget(DocWriteRequest docWriteRequest, Metadata metadata) { + // Feature flag guard + if (DataStream.isFailureStoreEnabled() == false) { + return null; + } + // Do not resolve a failure store for documents that were already headed to one + if (docWriteRequest instanceof IndexRequest indexRequest && indexRequest.isWriteToFailureStore()) { + return null; + } + // If there is no index abstraction, then the request is using a pattern of some sort, which data streams do not support + IndexAbstraction ia = metadata.getIndicesLookup().get(docWriteRequest.index()); + if (ia == null) { + return null; + } + if (ia.isDataStreamRelated()) { + // The index abstraction could be an alias. Alias abstractions (even for data streams) only keep track of which _index_ they + // will write to, not which _data stream_. + // We work backward to find the data stream from the concrete write index to cover this case. 
+ Index concreteIndex = ia.getWriteIndex(); + IndexAbstraction writeIndexAbstraction = metadata.getIndicesLookup().get(concreteIndex.getName()); + DataStream parentDataStream = writeIndexAbstraction.getParentDataStream(); + if (parentDataStream != null && parentDataStream.isFailureStore()) { + // Keep the data stream name around to resolve the redirect to failure store if the shard level request fails. + return parentDataStream.getName(); + } + } + return null; + } + + /** + * Marks a failed bulk item for redirection. At the end of the first round of shard requests, any documents in the + * redirect list are processed to their final destinations. + * + * @param request The bulk item request that failed + * @param cause The exception for the experienced the failure + * @param failureStoreReference The data stream that contains the failure store for this item + */ + private void addDocumentToRedirectRequests(BulkItemRequest request, Exception cause, String failureStoreReference) { + // Convert the document into a failure document + IndexRequest failureStoreRequest; + try { + failureStoreRequest = failureStoreDocumentConverter.transformFailedRequest( + TransportBulkAction.getIndexWriteRequest(request.request()), + cause, + failureStoreReference, + threadPool::absoluteTimeInMillis + ); + } catch (IOException ioException) { + logger.debug( + () -> "Could not transform failed bulk request item into failure store document. Attempted for [" + + request.request().opType() + + ": index=" + + request.index() + + "; id=" + + request.request().id() + + "; bulk_slot=" + + request.id() + + "] Proceeding with failing the original.", + ioException + ); + // Suppress and do not redirect + cause.addSuppressed(ioException); + return; + } + + // Store for second phase + BulkItemRequest redirected = new BulkItemRequest(request.id(), failureStoreRequest); + failureStoreRedirects.add(redirected); + } + + /** + * Examine the cluster state for blocks before continuing. 
If any block exists in the cluster state, this function will return + * {@code true}. If the block is retryable, the {@code retryOperation} runnable will be called asynchronously if the cluster ever + * becomes unblocked. If a non retryable block exists, or if we encounter a timeout before the blocks could be cleared, the + * {@code onClusterBlocked} consumer will be invoked with the cluster block exception. + * + * @param state The current state to check for blocks + * @param retryOperation If retryable blocks exist, the runnable to execute after they have cleared. + * @param onClusterBlocked Consumes the block exception if the cluster has a non retryable block or if we encounter a timeout while + * waiting for a block to clear. + * @return {@code true} if the cluster is currently blocked at all, {@code false} if the cluster has no blocks. + */ + private boolean handleBlockExceptions(ClusterState state, Runnable retryOperation, Consumer onClusterBlocked) { ClusterBlockException blockException = state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); if (blockException != null) { if (blockException.retryable()) { logger.trace("cluster is blocked, scheduling a retry", blockException); - retry(blockException); + retry(blockException, retryOperation, onClusterBlocked); } else { - onFailure(blockException); + onClusterBlocked.accept(blockException); } return true; } return false; } - void retry(Exception failure) { + void retry(Exception failure, final Runnable operation, final Consumer onClusterBlocked) { assert failure != null; if (observer.isTimedOut()) { - // we running as a last attempt after a timeout has happened. don't retry - onFailure(failure); + // we are running as a last attempt after a timeout has happened. 
don't retry + onClusterBlocked.accept(failure); return; } observer.waitForNextChange(new ClusterStateObserver.Listener() { @@ -282,6 +526,8 @@ public void onNewClusterState(ClusterState state) { @Override public void onClusterServiceClose() { + // There is very little we can do about this, and our time in this JVM is likely short. + // Let's just try to get out of here ASAP. onFailure(new NodeClosedException(clusterService.localNode())); } @@ -297,7 +543,7 @@ public void onTimeout(TimeValue timeout) { } private void dispatchRetry() { - threadPool.executor(executorName).submit(BulkOperation.this); + threadPool.executor(executorName).submit(operation); } }); } @@ -308,7 +554,7 @@ private boolean addFailureIfRequiresAliasAndAliasIsMissing(DocWriteRequest re "[" + DocWriteRequest.REQUIRE_ALIAS + "] request flag is [true] and [" + request.index() + "] is not an alias", request.index() ); - addFailure(request, idx, exception); + addFailureAndDiscardRequest(request, idx, request.index(), exception); return true; } return false; @@ -320,7 +566,7 @@ private boolean addFailureIfRequiresDataStreamAndNoParentDataStream(DocWriteRequ "[" + DocWriteRequest.REQUIRE_DATA_STREAM + "] request flag is [true] and [" + request.index() + "] is not a data stream", request.index() ); - addFailure(request, idx, exception); + addFailureAndDiscardRequest(request, idx, request.index(), exception); return true; } return false; @@ -329,7 +575,7 @@ private boolean addFailureIfRequiresDataStreamAndNoParentDataStream(DocWriteRequ private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index concreteIndex, int idx, final Metadata metadata) { IndexMetadata indexMetadata = metadata.getIndexSafe(concreteIndex); if (indexMetadata.getState() == IndexMetadata.State.CLOSE) { - addFailure(request, idx, new IndexClosedException(concreteIndex)); + addFailureAndDiscardRequest(request, idx, request.index(), new IndexClosedException(concreteIndex)); return true; } return false; @@ -338,20 +584,73 @@ 
private boolean addFailureIfIndexIsClosed(DocWriteRequest request, Index conc private boolean addFailureIfIndexCannotBeCreated(DocWriteRequest request, int idx) { IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); if (cannotCreate != null) { - addFailure(request, idx, cannotCreate); + addFailureAndDiscardRequest(request, idx, request.index(), cannotCreate); return true; } return false; } - private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { - BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.id(), unavailableException); - BulkItemResponse bulkItemResponse = BulkItemResponse.failure(idx, request.opType(), failure); - responses.set(idx, bulkItemResponse); + /** + * Like {@link BulkOperation#addFailure(DocWriteRequest, int, String, Exception)} but this method will remove the corresponding entry + * from the working bulk request so that it never gets processed again during this operation. + */ + private void addFailureAndDiscardRequest(DocWriteRequest request, int idx, String index, Exception exception) { + addFailure(request, idx, index, exception); // make sure the request gets never processed again bulkRequest.requests.set(idx, null); } + /** + * Checks if a bulk item response exists for this entry. If none exists, a failure response is created and set in the response array. + * If a response exists already, the failure information provided to this call will be added to the existing failure as a suppressed + * exception. 
+ * + * @param request The document write request that should be failed + * @param idx The slot of the bulk entry this request corresponds to + * @param index The resource that this entry was being written to when it failed + * @param exception The exception encountered for this entry + * @see BulkOperation#addFailure(BulkItemResponse) BulkOperation.addFailure if you have a bulk item response object already + */ + private void addFailure(DocWriteRequest request, int idx, String index, Exception exception) { + BulkItemResponse bulkItemResponse = responses.get(idx); + if (bulkItemResponse == null) { + BulkItemResponse.Failure failure = new BulkItemResponse.Failure(index, request.id(), exception); + bulkItemResponse = BulkItemResponse.failure(idx, request.opType(), failure); + } else { + // Response already recorded. We should only be here if the existing response is a failure and + // we are encountering a new failure while redirecting. + assert bulkItemResponse.isFailed() : "Attempting to overwrite successful bulk item result with a failure"; + bulkItemResponse.getFailure().getCause().addSuppressed(exception); + } + // Always replace the item in the responses for thread visibility of any mutations + responses.set(idx, bulkItemResponse); + } + + /** + * Checks if a bulk item response exists for this entry. If none exists, the failure is set in the response array. If a response exists + * already, the failure information provided to this call will be added to the existing failure as a suppressed exception. 
+ * + * @param bulkItemResponse the item response to add to the overall result array + * @see BulkOperation#addFailure(DocWriteRequest, int, String, Exception) BulkOperation.addFailure which conditionally creates the + * failure response only when one does not exist already + */ + private void addFailure(BulkItemResponse bulkItemResponse) { + assert bulkItemResponse.isFailed() : "Attempting to add a successful bulk item response via the addFailure method"; + BulkItemResponse existingBulkItemResponse = responses.get(bulkItemResponse.getItemId()); + if (existingBulkItemResponse != null) { + // Response already recorded. We should only be here if the existing response is a failure and + // we are encountering a new failure while redirecting. + assert existingBulkItemResponse.isFailed() : "Attempting to overwrite successful bulk item result with a failure"; + existingBulkItemResponse.getFailure().getCause().addSuppressed(bulkItemResponse.getFailure().getCause()); + bulkItemResponse = existingBulkItemResponse; + } + // Always replace the item in the responses for thread visibility of any mutations + responses.set(bulkItemResponse.getItemId(), bulkItemResponse); + } + + /** + * Resolves and caches index and routing abstractions to more efficiently group write requests into shards. + */ private static class ConcreteIndices { private final ClusterState state; private final IndexNameExpressionResolver indexNameExpressionResolver; @@ -363,6 +662,13 @@ private static class ConcreteIndices { this.indexNameExpressionResolver = indexNameExpressionResolver; } + /** + * Resolves the index abstraction that the write request is targeting, potentially obtaining it from a cache. This instance isn't + * fully resolved, meaning that {@link IndexAbstraction#getWriteIndex()} should be invoked in order to get concrete write index. 
+ * + * @param request a write request + * @return the index abstraction that the write request is targeting + */ IndexAbstraction resolveIfAbsent(DocWriteRequest request) { try { IndexAbstraction indexAbstraction = indexAbstractions.get(request.index()); @@ -380,6 +686,12 @@ IndexAbstraction resolveIfAbsent(DocWriteRequest request) { } } + /** + * Determines which routing strategy to use for a document being written to the provided index, potentially obtaining the result + * from a cache. + * @param index the index to determine routing strategy for + * @return an {@link IndexRouting} object to use for assigning a write request to a shard + */ IndexRouting routing(Index index) { IndexRouting routing = routings.get(index); if (routing == null) { diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java index 5e630bf9cdef5..2112ad48bec62 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java @@ -53,6 +53,7 @@ final class BulkRequestModifier implements Iterator> { final SparseFixedBitSet failedSlots; final List itemResponses; final AtomicIntegerArray originalSlots; + final FailureStoreDocumentConverter failureStoreDocumentConverter; volatile int currentSlot = -1; @@ -61,6 +62,7 @@ final class BulkRequestModifier implements Iterator> { this.failedSlots = new SparseFixedBitSet(bulkRequest.requests().size()); this.itemResponses = new ArrayList<>(bulkRequest.requests().size()); this.originalSlots = new AtomicIntegerArray(bulkRequest.requests().size()); // oversize, but that's ok + this.failureStoreDocumentConverter = new FailureStoreDocumentConverter(); } @Override @@ -243,7 +245,7 @@ public void markItemForFailureStore(int slot, String targetIndexName, Exception ); } else { try { - IndexRequest errorDocument = 
FailureStoreDocument.transformFailedRequest(indexRequest, e, targetIndexName); + IndexRequest errorDocument = failureStoreDocumentConverter.transformFailedRequest(indexRequest, e, targetIndexName); // This is a fresh index request! We need to do some preprocessing on it. If we do not, when this is returned to // the bulk action, the action will see that it hasn't been processed by ingest yet and attempt to ingest it again. errorDocument.isPipelineResolved(true); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java similarity index 94% rename from server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java rename to server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java index e0d6e8200e86d..ce76f377ac94e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverter.java @@ -22,9 +22,7 @@ /** * Transforms an indexing request using error information into a new index request to be stored in a data stream's failure store. */ -public final class FailureStoreDocument { - - private FailureStoreDocument() {} +public class FailureStoreDocumentConverter { /** * Combines an {@link IndexRequest} that has failed during the bulk process with the error thrown for that request. 
The result is a @@ -35,7 +33,7 @@ private FailureStoreDocument() {} * @return A new {@link IndexRequest} with a failure store compliant structure * @throws IOException If there is a problem when the document's new source is serialized */ - public static IndexRequest transformFailedRequest(IndexRequest source, Exception exception, String targetIndexName) throws IOException { + public IndexRequest transformFailedRequest(IndexRequest source, Exception exception, String targetIndexName) throws IOException { return transformFailedRequest(source, exception, targetIndexName, System::currentTimeMillis); } @@ -49,7 +47,7 @@ public static IndexRequest transformFailedRequest(IndexRequest source, Exception * @return A new {@link IndexRequest} with a failure store compliant structure * @throws IOException If there is a problem when the document's new source is serialized */ - public static IndexRequest transformFailedRequest( + public IndexRequest transformFailedRequest( IndexRequest source, Exception exception, String targetIndexName, diff --git a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java index ea8eadd66acaa..165280e370025 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java +++ b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java @@ -15,11 +15,13 @@ import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; +import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.BooleanSupplier; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; +import java.util.function.Supplier; import java.util.function.ToIntFunction; public class Iterators { @@ -56,7 +58,7 @@ public static Iterator concat(Iterator... 
iterators) { for (int i = 0; i < iterators.length; i++) { if (iterators[i].hasNext()) { // explicit generic type argument needed for type inference - return new ConcatenatedIterator(iterators, i); + return new ConcatenatedIterator<>(iterators, i); } } @@ -258,6 +260,103 @@ public T next() { } } + /** + * Enumerates the elements of an iterator together with their index, using a function to combine the pair together into the final items + * produced by the iterator. + *

      + * An example of its usage to enumerate a list of names together with their positional index in the list: + *

      + *
      
      +     * Iterator<String> nameIterator = ...;
      +     * Iterator<Tuple<Integer, String>> enumeratedNames = Iterators.enumerate(nameIterator, Tuple::new);
      +     * enumeratedNames.forEachRemaining(tuple -> System.out.println("Index: " + t.v1() + ", Name: " + t.v2()));
      +     * 
      + * + * @param input The iterator to wrap + * @param fn A function that takes the index for an entry and the entry itself, returning an item that combines them together + * @return An iterator that combines elements together with their indices in the underlying collection + * @param The object type contained in the original iterator + * @param The object type that results from combining the original entry with its index in the iterator + */ + public static Iterator enumerate(Iterator input, BiFunction fn) { + return new EnumeratingIterator<>(Objects.requireNonNull(input), Objects.requireNonNull(fn)); + } + + private static class EnumeratingIterator implements Iterator { + private final Iterator input; + private final BiFunction fn; + + private int idx = 0; + + EnumeratingIterator(Iterator input, BiFunction fn) { + this.input = input; + this.fn = fn; + } + + @Override + public boolean hasNext() { + return input.hasNext(); + } + + @Override + public U next() { + return fn.apply(idx++, input.next()); + } + + @Override + public void forEachRemaining(Consumer action) { + input.forEachRemaining(t -> action.accept(fn.apply(idx++, t))); + } + } + + /** + * Adapts a {@link Supplier} object into an iterator. The resulting iterator will return values from the delegate Supplier until the + * delegate returns a null value. Once the delegate returns null, the iterator will claim to be empty. + *

      + * An example of its usage to iterate over a queue while draining it at the same time: + *

      + *
      
      +     *     LinkedList<String> names = ...;
      +     *     assert names.size() != 0;
      +     *
      +     *     Iterator<String> nameIterator = Iterator.fromSupplier(names::pollFirst);
      +     *     nameIterator.forEachRemaining(System.out::println)
      +     *     assert names.size() == 0;
      +     * 
      + * + * @param input A {@link Supplier} that returns null when no more elements should be returned from the iterator + * @return An iterator that returns elements by calling the supplier until a null value is returned + * @param The object type returned from the supplier function + */ + public static Iterator fromSupplier(Supplier input) { + return new SupplierIterator<>(Objects.requireNonNull(input)); + } + + private static final class SupplierIterator implements Iterator { + private final Supplier fn; + private T head; + + SupplierIterator(Supplier fn) { + this.fn = fn; + this.head = fn.get(); + } + + @Override + public boolean hasNext() { + return head != null; + } + + @Override + public T next() { + if (head == null) { + throw new NoSuchElementException(); + } + T next = head; + head = fn.get(); + return next; + } + } + public static boolean equals(Iterator iterator1, Iterator iterator2, BiPredicate itemComparer) { if (iterator1 == null) { return iterator2 == null; diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java new file mode 100644 index 0000000000000..2226c40b618f4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -0,0 +1,870 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.coordination.NoMasterBlockService; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.MapperException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.SystemIndices; +import 
org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpNodeClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class BulkOperationTests extends ESTestCase { + + private final long millis = randomMillisUpToYear9999(); + private final String indexName = "my_index"; + private final String dataStreamName = "my_data_stream"; + private final String fsDataStreamName = "my_failure_store_data_stream"; + + private final IndexMetadata indexMetadata = IndexMetadata.builder(indexName) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .build() + ) + .build(); + private final IndexMetadata ds1BackingIndex1 = DataStreamTestHelper.createBackingIndex(dataStreamName, 1, millis) + .numberOfShards(2) + .build(); + private final IndexMetadata ds1BackingIndex2 = 
DataStreamTestHelper.createBackingIndex(dataStreamName, 2, millis + 1) + .numberOfShards(2) + .build(); + private final IndexMetadata ds2BackingIndex1 = DataStreamTestHelper.createBackingIndex(fsDataStreamName, 1, millis) + .numberOfShards(2) + .build(); + private final IndexMetadata ds2FailureStore1 = DataStreamTestHelper.createFailureStore(fsDataStreamName, 1, millis) + .numberOfShards(1) + .build(); + + private final DataStream dataStream1 = DataStreamTestHelper.newInstance( + dataStreamName, + List.of(ds1BackingIndex1.getIndex(), ds1BackingIndex2.getIndex()) + ); + private final DataStream dataStream2 = DataStreamTestHelper.newInstance( + fsDataStreamName, + List.of(ds2BackingIndex1.getIndex()), + List.of(ds2FailureStore1.getIndex()) + ); + + private final ClusterState DEFAULT_STATE = ClusterState.builder(ClusterName.DEFAULT) + .metadata( + Metadata.builder() + .indexTemplates( + Map.of( + "ds-template", + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, false)) + .template(new Template(null, null, null, null)) + .build(), + "ds-template-with-failure-store", + ComposableIndexTemplate.builder() + .indexPatterns(List.of(fsDataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) + .template(new Template(null, null, null, null)) + .build() + ) + ) + .indices( + Map.of( + indexName, + indexMetadata, + ds1BackingIndex1.getIndex().getName(), + ds1BackingIndex1, + ds1BackingIndex2.getIndex().getName(), + ds1BackingIndex2, + ds2BackingIndex1.getIndex().getName(), + ds2BackingIndex1, + ds2FailureStore1.getIndex().getName(), + ds2FailureStore1 + ) + ) + .dataStreams(Map.of(dataStreamName, dataStream1, fsDataStreamName, dataStream2), Map.of()) + .build() + ) + .build(); + + private TestThreadPool threadPool; + + @Before + public void setupThreadpool() { + threadPool = new TestThreadPool(getClass().getName()); + } + 
+ @After + public void tearDownThreadpool() { + terminate(threadPool); + } + + /** + * If a bulk operation begins and the cluster is experiencing a non-retryable block, the bulk operation should fail + */ + public void testClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Not retryable + ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(Metadata.CLUSTER_READ_ONLY_BLOCK).build()) + .build(); + + // Make sure we don't wait at all + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false); + doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + expectThrows(ExecutionException.class, ClusterBlockException.class, future::get); + } + + /** + * If a bulk operation times out while waiting for cluster blocks to be cleared, it should fail the request. 
+ */ + public void testTimeoutOnRetryableClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Retryable + final ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // Always return cluster state, first observation: return same cluster state, second observation: time out, ensure no further wait + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + // Returning same state or timing out will result in one more attempt. + if (randomBoolean()) { + i.getArgument(0, ClusterStateObserver.Listener.class).onNewClusterState(state); + } else { + i.getArgument(0, ClusterStateObserver.Listener.class).onTimeout(null); + } + return null; + }).doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + expectThrows(ExecutionException.class, ClusterBlockException.class, future::get); + verify(observer, times(2)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * If the cluster service closes while a bulk operation is waiting for cluster blocks to be cleared, it should fail the request. 
+ */ + public void testNodeClosedOnRetryableClusterBlockedFailsBulk() { + NodeClient client = getNodeClient((r) -> { + fail("Should not have executed shard action on blocked cluster"); + return null; + }); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Retryable + final ClusterState state = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // Always return cluster state, first observation: signal cluster service closed, ensure no further wait + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(state); + when(observer.isTimedOut()).thenReturn(false); + doAnswer((i) -> { + i.getArgument(0, ClusterStateObserver.Listener.class).onClusterServiceClose(); + return null; + }).doThrow(new AssertionError("Should not wait")).when(observer).waitForNextChange(any()); + + newBulkOperation(client, new BulkRequest(), state, observer, listener).run(); + + expectThrows(ExecutionException.class, NodeClosedException.class, future::get); + verify(observer, times(1)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * A bulk operation to an index should succeed if all of its shard level requests succeed + */ + public void testBulkToIndex() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(indexName).id("1").source(Map.of("key", "val"))); + bulkRequest.add(new IndexRequest(indexName).id("3").source(Map.of("key", "val"))); + + NodeClient client = getNodeClient(this::acceptAllShardWrites); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, 
bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + } + + /** + * A bulk operation to an index should partially succeed if only some of its shard level requests fail + */ + public void testBulkToIndexFailingEntireShard() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(indexName).id("1").source(Map.of("key", "val"))); + bulkRequest.add(new IndexRequest(indexName).id("3").source(Map.of("key", "val"))); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(indexMetadata.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find failed item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("test"))); + } + + /** + * A bulk operation to a data stream should succeed if all of its shard level requests succeed + */ + public void testBulkToDataStream() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(dataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(dataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = 
getNodeClient(this::acceptAllShardWrites); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + } + + /** + * A bulk operation to a data stream should partially succeed if only some of its shard level requests fail + */ + public void testBulkToDataStreamFailingEntireShard() throws Exception { + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(dataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(dataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds1BackingIndex2.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find failed item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("test"))); + } + + /** + * A bulk operation to a data stream with a failure store enabled should redirect any shard level failures to the failure store. 
+ */ + public void testFailingEntireShardRedirectsToFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(item -> item.getIndex().equals(ds2FailureStore1.getIndex().getName())) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem, is(notNullValue())); + } + + /** + * A bulk operation to a data stream with a failure store enabled should redirect any documents that fail at a shard level to the + * failure store. 
+ */ + public void testFailingDocumentRedirectsToFailureStore() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + thatFailsDocuments(Map.of(new IndexAndId(ds2BackingIndex1.getIndex().getName(), "3"), () -> new MapperException("test"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(false)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(item -> item.getIndex().equals(ds2FailureStore1.getIndex().getName())) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getIndex(), is(notNullValue())); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the redirected documents experience + * a shard-level failure while writing to the failure store indices. 
+ */ + public void testFailureStoreShardFailureRejectsDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. + NodeClient client = getNodeClient( + failingShards( + Map.of( + new ShardId(ds2BackingIndex1.getIndex(), 0), + () -> new MapperException("root cause"), + new ShardId(ds2FailureStore1.getIndex(), 0), + () -> new MapperException("failure store test failure") + ) + ) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), is(equalTo("failure store test failure"))); + } + + /** + * A document that fails 
at the shard level will be converted into a failure document if an applicable failure store is present. + * In the unlikely case that the failure document cannot be created, the document will not be redirected to the failure store and + * instead will simply report its original failure in the response, with the conversion failure present as a suppressed exception. + */ + public void testFailedDocumentCanNotBeConvertedFails() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + NodeClient client = getNodeClient( + thatFailsDocuments(Map.of(new IndexAndId(ds2BackingIndex1.getIndex().getName(), "3"), () -> new MapperException("root cause"))) + ); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + // Mock a failure store document converter that always fails + FailureStoreDocumentConverter mockConverter = mock(FailureStoreDocumentConverter.class); + when(mockConverter.transformFailedRequest(any(), any(), any(), any())).thenThrow(new IOException("Could not serialize json")); + + newBulkOperation(client, bulkRequest, mockConverter, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + 
assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(IOException.class))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), is(equalTo("Could not serialize json"))); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the cluster is experiencing a + * non-retryable block when the redirected documents would be sent to the shard-level action. + */ + public void testBlockedClusterRejectsFailureStoreDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. 
+ NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a non-retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(IndexMetadata.INDEX_READ_ONLY_BLOCK).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the observer timing out causing the redirects to fail. + ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false); + doThrow(new AssertionError("Should not wait on non retryable block")).when(observer).waitForNextChange(any()); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(ClusterBlockException.class))); + 
assertThat( + failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), + is(equalTo("blocked by: [FORBIDDEN/5/index read-only (api)];")) + ); + + verify(observer, times(0)).isTimedOut(); + verify(observer, times(0)).waitForNextChange(any()); + } + + /** + * A bulk operation to a data stream with a failure store enabled may still partially fail if the cluster times out while waiting for a + * retryable block to clear when the redirected documents would be sent to the shard-level action. + */ + public void testOperationTimeoutRejectsFailureStoreDocument() throws Exception { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the observer timing out causing the redirects to fail. 
+ ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + // Returning same state or timing out will result in one more attempt. + if (randomBoolean()) { + i.getArgument(0, ClusterStateObserver.Listener.class).onNewClusterState(blockedState); + } else { + i.getArgument(0, ClusterStateObserver.Listener.class).onTimeout(null); + } + return null; + }).doThrow(new AssertionError("Should not wait any longer")).when(observer).waitForNextChange(any()); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + BulkResponse bulkItemResponses = future.get(); + assertThat(bulkItemResponses.hasFailures(), is(true)); + BulkItemResponse failedItem = Arrays.stream(bulkItemResponses.getItems()) + .filter(BulkItemResponse::isFailed) + .findFirst() + .orElseThrow(() -> new AssertionError("Could not find redirected item")); + assertThat(failedItem.getFailure().getCause(), is(instanceOf(MapperException.class))); + assertThat(failedItem.getFailure().getCause().getMessage(), is(equalTo("root cause"))); + assertThat(failedItem.getFailure().getCause().getSuppressed().length, is(not(equalTo(0)))); + assertThat(failedItem.getFailure().getCause().getSuppressed()[0], is(instanceOf(ClusterBlockException.class))); + assertThat( + failedItem.getFailure().getCause().getSuppressed()[0].getMessage(), + is(equalTo("blocked by: [SERVICE_UNAVAILABLE/2/no master];")) + ); + + verify(observer, times(2)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * A bulk operation to a data stream with a failure store enabled may completely fail if the cluster service closes out while waiting + * for a retryable block to 
clear when the redirected documents would be sent to the shard-level action. + */ + public void testNodeClosureRejectsFailureStoreDocument() { + Assume.assumeTrue(DataStream.isFailureStoreEnabled()); + + // Requests that go to two separate shards + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("1").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + bulkRequest.add(new IndexRequest(fsDataStreamName).id("3").source(Map.of("key", "val")).opType(DocWriteRequest.OpType.CREATE)); + + // Mock client that rejects all shard requests on the first shard in the backing index, and all requests to the only shard of + // the failure store index. + NodeClient client = getNodeClient( + failingShards(Map.of(new ShardId(ds2BackingIndex1.getIndex(), 0), () -> new MapperException("root cause"))) + ); + + // Create a new cluster state that has a retryable cluster block on it + ClusterState blockedState = ClusterState.builder(DEFAULT_STATE) + .blocks(ClusterBlocks.builder().addGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_WRITES).build()) + .build(); + + // First time we will return the normal cluster state (before normal writes) which skips any further interactions, + // Second time we will return a blocked cluster state (before the redirects) causing us to start observing the cluster + // Finally, we will simulate the node closing causing the redirects to fail. 
+ ClusterStateObserver observer = mock(ClusterStateObserver.class); + when(observer.setAndGetObservedState()).thenReturn(DEFAULT_STATE).thenReturn(blockedState); + when(observer.isTimedOut()).thenReturn(false, true); + doAnswer((i) -> { + i.getArgument(0, ClusterStateObserver.Listener.class).onClusterServiceClose(); + return null; + }).doThrow(new AssertionError("Should not wait any longer")).when(observer).waitForNextChange(any()); + + CompletableFuture future = new CompletableFuture<>(); + ActionListener listener = ActionListener.wrap(future::complete, future::completeExceptionally); + + newBulkOperation(client, bulkRequest, DEFAULT_STATE, observer, listener).run(); + + expectThrows(ExecutionException.class, NodeClosedException.class, future::get); + + verify(observer, times(1)).isTimedOut(); + verify(observer, times(1)).waitForNextChange(any()); + } + + /** + * Accepts all write operations from the given request object when it is encountered in the mock shard bulk action + */ + private BulkShardResponse acceptAllShardWrites(BulkShardRequest request) { + return new BulkShardResponse( + request.shardId(), + Arrays.stream(request.items()).map(item -> requestToResponse(request.shardId(), item)).toArray(BulkItemResponse[]::new) + ); + } + + /** + * Maps an entire shard id to an exception to throw when it is encountered in the mock shard bulk action + */ + private CheckedFunction failingShards(Map> shardsToFail) { + return (BulkShardRequest request) -> { + if (shardsToFail.containsKey(request.shardId())) { + throw shardsToFail.get(request.shardId()).get(); + } else { + return acceptAllShardWrites(request); + } + }; + } + + /** + * Index name / id tuple + */ + private record IndexAndId(String indexName, String id) {} + + /** + * Maps a document to an exception to thrown when it is encountered in the mock shard bulk action + */ + private CheckedFunction thatFailsDocuments( + Map> documentsToFail + ) { + return (BulkShardRequest request) -> new 
BulkShardResponse(request.shardId(), Arrays.stream(request.items()).map(item -> { + IndexAndId key = new IndexAndId(request.index(), item.request().id()); + if (documentsToFail.containsKey(key)) { + return requestToFailedResponse(item, documentsToFail.get(key).get()); + } else { + return requestToResponse(request.shardId(), item); + } + }).toArray(BulkItemResponse[]::new)); + } + + /** + * Create a shard-level result given a bulk item + */ + private static BulkItemResponse requestToResponse(ShardId shardId, BulkItemRequest itemRequest) { + return BulkItemResponse.success(itemRequest.id(), itemRequest.request().opType(), switch (itemRequest.request().opType()) { + case INDEX, CREATE -> new IndexResponse(shardId, itemRequest.request().id(), 1, 1, 1, true); + case UPDATE -> new UpdateResponse(shardId, itemRequest.request().id(), 1, 1, 1, DocWriteResponse.Result.UPDATED); + case DELETE -> new DeleteResponse(shardId, itemRequest.request().id(), 1, 1, 1, true); + }); + } + + /** + * Create a shard-level failure given a bulk item + */ + private static BulkItemResponse requestToFailedResponse(BulkItemRequest itemRequest, Exception reason) { + return BulkItemResponse.failure( + itemRequest.id(), + itemRequest.request().opType(), + new BulkItemResponse.Failure(itemRequest.index(), itemRequest.request().id(), reason) + ); + } + + /** + * Create a client that redirects expected actions to the provided function and fails if an unexpected operation happens. + * @param onShardAction Called when TransportShardBulkAction is executed. + * @return A node client for the test. 
+ */ + private NodeClient getNodeClient(CheckedFunction onShardAction) { + return new NoOpNodeClient(threadPool) { + @Override + @SuppressWarnings("unchecked") + public Task executeLocally( + ActionType action, + Request request, + ActionListener listener + ) { + if (TransportShardBulkAction.TYPE.equals(action)) { + Response response = null; + Exception exception = null; + try { + response = (Response) onShardAction.apply((BulkShardRequest) request); + } catch (Exception responseException) { + exception = responseException; + } + if (response != null) { + listener.onResponse(response); + } else { + listener.onFailure(exception); + } + } else { + fail("Unexpected client call to " + action.name()); + } + return null; + } + }; + } + + private BulkOperation newBulkOperation(NodeClient client, BulkRequest request, ActionListener listener) { + return newBulkOperation( + DEFAULT_STATE, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + mockObserver(DEFAULT_STATE), + listener, + new FailureStoreDocumentConverter() + ); + } + + private BulkOperation newBulkOperation( + NodeClient client, + BulkRequest request, + FailureStoreDocumentConverter failureStoreDocumentConverter, + ActionListener listener + ) { + return newBulkOperation( + DEFAULT_STATE, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + mockObserver(DEFAULT_STATE), + listener, + failureStoreDocumentConverter + ); + } + + private BulkOperation newBulkOperation( + NodeClient client, + BulkRequest request, + ClusterState state, + ClusterStateObserver observer, + ActionListener listener + ) { + return newBulkOperation( + state, + client, + request, + new AtomicArray<>(request.numberOfActions()), + Map.of(), + observer, + listener, + new FailureStoreDocumentConverter() + ); + } + + private BulkOperation newBulkOperation( + ClusterState state, + NodeClient client, + BulkRequest request, + AtomicArray existingResponses, + Map indicesThatCanNotBeCreated, + 
ClusterStateObserver observer, + ActionListener listener, + FailureStoreDocumentConverter failureStoreDocumentConverter + ) { + // Time provision + long timeZero = TimeUnit.MILLISECONDS.toNanos(randomMillisUpToYear9999() - TimeUnit.DAYS.toMillis(1)); + long duration = TimeUnit.SECONDS.toNanos(randomLongBetween(1, 60)); + long endTime = timeZero + duration; + + // Expressions + ThreadContext ctx = threadPool.getThreadContext(); + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(ctx, new SystemIndices(List.of())); + + // Mocks + final DiscoveryNode mockNode = mock(DiscoveryNode.class); + when(mockNode.getId()).thenReturn(randomAlphaOfLength(10)); + final ClusterService clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(state); + when(clusterService.localNode()).thenReturn(mockNode); + + return new BulkOperation( + null, + threadPool, + ThreadPool.Names.SAME, + clusterService, + request, + client, + existingResponses, + indicesThatCanNotBeCreated, + indexNameExpressionResolver, + () -> endTime, + timeZero, + listener, + observer, + failureStoreDocumentConverter + ); + } + + /** + * A default mock cluster state observer that simply returns the state + */ + private ClusterStateObserver mockObserver(ClusterState state) { + ClusterStateObserver mockObserver = mock(ClusterStateObserver.class); + when(mockObserver.setAndGetObservedState()).thenReturn(state); + when(mockObserver.isTimedOut()).thenReturn(false); + return mockObserver; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java similarity index 90% rename from server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java rename to server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java index 962c796e18c2a..67116bd40c2c8 100644 --- 
a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentConverterTests.java @@ -22,7 +22,7 @@ import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; -public class FailureStoreDocumentTests extends ESTestCase { +public class FailureStoreDocumentConverterTests extends ESTestCase { public void testFailureStoreDocumentConverstion() throws Exception { IndexRequest source = new IndexRequest("original_index").routing("fake_routing") @@ -36,7 +36,12 @@ public void testFailureStoreDocumentConverstion() throws Exception { String targetIndexName = "rerouted_index"; long testTime = 1702357200000L; // 2023-12-12T05:00:00.000Z - IndexRequest convertedRequest = FailureStoreDocument.transformFailedRequest(source, exception, targetIndexName, () -> testTime); + IndexRequest convertedRequest = new FailureStoreDocumentConverter().transformFailedRequest( + source, + exception, + targetIndexName, + () -> testTime + ); // Retargeting write assertThat(convertedRequest.id(), is(nullValue())); @@ -63,7 +68,7 @@ public void testFailureStoreDocumentConverstion() throws Exception { ); assertThat( ObjectPath.eval("error.stack_trace", convertedRequest.sourceAsMap()), - containsString("at org.elasticsearch.action.bulk.FailureStoreDocumentTests.testFailureStoreDocumentConverstion") + containsString("at org.elasticsearch.action.bulk.FailureStoreDocumentConverterTests.testFailureStoreDocumentConverstion") ); assertThat(convertedRequest.isWriteToFailureStore(), is(true)); diff --git a/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java b/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java index 351efa59f2381..67f74df78e256 100644 --- a/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java +++ b/server/src/test/java/org/elasticsearch/common/collect/IteratorsTests.java @@ -9,12 +9,14 @@ 
package org.elasticsearch.common.collect; import org.elasticsearch.common.Randomness; +import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; @@ -242,6 +244,29 @@ public Integer next() { assertEquals(array.length, index.get()); } + public void testEnumerate() { + assertEmptyIterator(Iterators.enumerate(Iterators.concat(), Tuple::new)); + + final var array = randomIntegerArray(); + final var index = new AtomicInteger(); + Iterators.enumerate(Iterators.forArray(array), Tuple::new).forEachRemaining(t -> { + int idx = index.getAndIncrement(); + assertEquals(idx, t.v1().intValue()); + assertEquals(array[idx], t.v2()); + }); + assertEquals(array.length, index.get()); + } + + public void testSupplier() { + assertEmptyIterator(Iterators.fromSupplier(() -> null)); + + final var array = randomIntegerArray(); + final var index = new AtomicInteger(); + final var queue = new LinkedList<>(Arrays.asList(array)); + Iterators.fromSupplier(queue::pollFirst).forEachRemaining(i -> assertEquals(array[index.getAndIncrement()], i)); + assertEquals(array.length, index.get()); + } + public void testEquals() { final BiPredicate notCalled = (a, b) -> { throw new AssertionError("not called"); }; From 9495f2daa1bc05ec103f2ffaad96121315f8572a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Mar 2024 20:21:15 +0000 Subject: [PATCH 191/214] Bump versions after 8.13.0 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 12 ++++++------ .buildkite/pipelines/periodic.yml | 16 ++++++++-------- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 3 +-- .../src/main/java/org/elasticsearch/Version.java | 2 +- .../org/elasticsearch/TransportVersions.csv | 1 + 
.../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 20 insertions(+), 19 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index f45caaf7fdfaf..cb8062fef02b4 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index c38e0e48cd070..9992d940e3c97 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1889,8 +1889,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.12.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.3 + - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 timeout_in_minutes: 300 matrix: setup: @@ -1903,10 +1903,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.12.3 + BWC_VERSION: 8.13.0 - - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 + - label: "{{matrix.image}} / 8.13.1 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.1 timeout_in_minutes: 300 matrix: setup: @@ -1919,7 +1919,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.0 + BWC_VERSION: 8.13.1 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 23f0e7d4bbacf..ff378477f7aa6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1162,8 +1162,8 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.2 - - label: 8.12.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.3#bwcTest + - label: 8.13.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.0#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -1171,9 +1171,9 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.12.3 - - label: 8.13.0 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.0#bwcTest + BWC_VERSION: 8.13.0 + - label: 8.13.1 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.1#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -1181,7 +1181,7 @@ steps: machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.0 + BWC_VERSION: 8.13.1 - label: 8.14.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.14.0#bwcTest timeout_in_minutes: 300 @@ -1256,7 +1256,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -1300,7 +1300,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.20", "8.12.3", "8.13.0", "8.14.0"] + BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index bc5c24cf0f365..a655b5a862683 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -115,6 +115,6 @@ BWC_VERSION: - "8.12.0" - "8.12.1" - "8.12.2" - - "8.12.3" - "8.13.0" + - "8.13.1" - "8.14.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 
6d391a3fd72ae..f31603772a7f7 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - "7.17.20" - - "8.12.3" - - "8.13.0" + - "8.13.1" - "8.14.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 391ede4d2aa40..3a4958e046a82 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -167,8 +167,8 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_12_0 = new Version(8_12_00_99); public static final Version V_8_12_1 = new Version(8_12_01_99); public static final Version V_8_12_2 = new Version(8_12_02_99); - public static final Version V_8_12_3 = new Version(8_12_03_99); public static final Version V_8_13_0 = new Version(8_13_00_99); + public static final Version V_8_13_1 = new Version(8_13_01_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version CURRENT = V_8_14_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 17f594ec992d1..0a1480526c9f0 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -113,3 +113,4 @@ 8.12.0,8560000 8.12.1,8560001 8.12.2,8560001 +8.13.0,8595000 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index b29ae972c9b13..f66cda3c08fc7 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -113,3 +113,4 @@ 8.12.0,8500008 8.12.1,8500010 8.12.2,8500010 +8.13.0,8503000 From 9e6b893896a738bbb77b2dab7cc1e6640881ceee Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 26 Mar 
2024 13:37:19 -0700 Subject: [PATCH 192/214] Text fields are stored by default in TSDB indices (#106338) * Text fields are stored by default with synthetic source Synthetic source requires text fields to be stored or have keyword sub-field that supports synthetic source. If there are no keyword fields users currently have to explicitly set 'store' to 'true' or get a validation exception. This is not the best experience. It is quite likely that setting `store` to `true` is the correct thing to do but users still get an error and need to investigate it. With this change if `store` setting is not specified in such context it will be set to `true` by default. Setting it explicitly to `false` results in the exception. Closes #97039 --- docs/changelog/106338.yaml | 6 ++ docs/reference/mapping/types/text.asciidoc | 7 +- .../test/tsdb/90_unsupported_operations.yml | 1 + .../index/mapper/DynamicFieldsBuilder.java | 6 +- .../index/mapper/FieldMapper.java | 23 ++++++ .../index/mapper/KeywordFieldMapper.java | 17 +++++ .../index/mapper/TextFieldMapper.java | 68 ++++++++++++----- .../index/query/QueryRewriteContext.java | 6 +- .../fielddata/AbstractFieldDataTestCase.java | 8 +- .../index/fielddata/FilterFieldDataTests.java | 27 +++++-- .../fielddata/IndexFieldDataServiceTests.java | 24 +++--- .../mapper/DocumentParserContextTests.java | 8 +- .../index/mapper/MultiFieldsTests.java | 72 ++++++++++++++++++ .../index/mapper/ObjectMapperMergeTests.java | 6 +- .../index/mapper/ObjectMapperTests.java | 4 +- .../mapper/TextFieldAnalyzerModeTests.java | 13 ++++ .../index/mapper/TextFieldMapperTests.java | 74 ++++++++++++++++++- .../highlight/HighlightBuilderTests.java | 6 +- .../rescore/QueryRescorerBuilderTests.java | 12 ++- 19 files changed, 332 insertions(+), 56 deletions(-) create mode 100644 docs/changelog/106338.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java diff --git a/docs/changelog/106338.yaml b/docs/changelog/106338.yaml new 
file mode 100644 index 0000000000000..c05826d87a11f --- /dev/null +++ b/docs/changelog/106338.yaml @@ -0,0 +1,6 @@ +pr: 106338 +summary: Text fields are stored by default in TSDB indices +area: TSDB +type: enhancement +issues: + - 97039 diff --git a/docs/reference/mapping/types/text.asciidoc b/docs/reference/mapping/types/text.asciidoc index 2a6e2f3ef8ae8..c33af69df5607 100644 --- a/docs/reference/mapping/types/text.asciidoc +++ b/docs/reference/mapping/types/text.asciidoc @@ -133,8 +133,11 @@ The following parameters are accepted by `text` fields: <>:: Whether the field value should be stored and retrievable separately from - the <> field. Accepts `true` or `false` - (default). + the <> field. Accepts `true` or `false` (default). + This parameter will be automatically set to `true` for TSDB indices + (indices that have `index.mode` set to `time_series`) + if there is no <> + sub-field that supports synthetic `_source`. <>:: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index 049b9670b6b46..57ad446eaf637 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -278,6 +278,7 @@ synthetic source text field: type: keyword name: type: text + store: false value: type: long time_series_metric: gauge diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java index 8505c561bfb1a..799042b4f3a87 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicFieldsBuilder.java @@ -333,7 +333,11 @@ public boolean newDynamicStringField(DocumentParserContext context, 
String name) ); } else { return createDynamicField( - new TextFieldMapper.Builder(name, context.indexAnalyzers()).addMultiField( + new TextFieldMapper.Builder( + name, + context.indexAnalyzers(), + context.indexSettings().getMode().isSyntheticSourceEnabled() + ).addMultiField( new KeywordFieldMapper.Builder("keyword", context.indexSettings().getIndexVersionCreated()).ignoreAbove(256) ), context diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index e029aaa657d23..fe9bdd73cfa10 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -450,13 +450,28 @@ public static class Builder { private final Map> mapperBuilders = new HashMap<>(); + private boolean hasSyntheticSourceCompatibleKeywordField; + public Builder add(FieldMapper.Builder builder) { mapperBuilders.put(builder.name(), builder::build); + + if (builder instanceof KeywordFieldMapper.Builder kwd) { + if (kwd.hasNormalizer() == false && (kwd.hasDocValues() || kwd.isStored())) { + hasSyntheticSourceCompatibleKeywordField = true; + } + } + return this; } private void add(FieldMapper mapper) { mapperBuilders.put(mapper.simpleName(), context -> mapper); + + if (mapper instanceof KeywordFieldMapper kwd) { + if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { + hasSyntheticSourceCompatibleKeywordField = true; + } + } } private void update(FieldMapper toMerge, MapperMergeContext context) { @@ -474,6 +489,10 @@ public boolean hasMultiFields() { return mapperBuilders.isEmpty() == false; } + public boolean hasSyntheticSourceCompatibleKeywordField() { + return hasSyntheticSourceCompatibleKeywordField; + } + public MultiFields build(Mapper.Builder mainFieldBuilder, MapperBuilderContext context) { if (mapperBuilders.isEmpty()) { return empty(); @@ -1134,6 +1153,10 @@ 
public static Parameter storeParam(Function initi return Parameter.boolParam("store", false, initializer, defaultValue); } + public static Parameter storeParam(Function initializer, Supplier defaultValue) { + return Parameter.boolParam("store", false, initializer, defaultValue); + } + public static Parameter docValuesParam(Function initializer, boolean defaultValue) { return Parameter.boolParam("doc_values", false, initializer, defaultValue); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 4024798a85370..bdf25307d3343 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -227,6 +227,10 @@ Builder normalizer(String normalizerName) { return this; } + public boolean hasNormalizer() { + return this.normalizer.get() != null; + } + Builder nullValue(String nullValue) { this.nullValue.setValue(nullValue); return this; @@ -237,6 +241,10 @@ public Builder docValues(boolean hasDocValues) { return this; } + public boolean hasDocValues() { + return this.hasDocValues.get(); + } + public Builder dimension(boolean dimension) { this.dimension.setValue(dimension); return this; @@ -247,6 +255,15 @@ public Builder indexed(boolean indexed) { return this; } + public Builder stored(boolean stored) { + this.stored.setValue(stored); + return this; + } + + public boolean isStored() { + return this.stored.get(); + } + private FieldValues scriptValues() { if (script.get() == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index faa840dacc732..ef512e2bbd46b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java 
@@ -236,9 +236,11 @@ private static FielddataFrequencyFilter parseFrequencyFilter(String name, Mappin public static class Builder extends FieldMapper.Builder { private final IndexVersion indexCreatedVersion; + private final Parameter store; + + private final boolean isSyntheticSourceEnabledViaIndexMode; private final Parameter index = Parameter.indexParam(m -> ((TextFieldMapper) m).index, true); - private final Parameter store = Parameter.storeParam(m -> ((TextFieldMapper) m).store, false); final Parameter similarity = TextParams.similarity(m -> ((TextFieldMapper) m).similarity); @@ -283,12 +285,28 @@ public static class Builder extends FieldMapper.Builder { final TextParams.Analyzers analyzers; - public Builder(String name, IndexAnalyzers indexAnalyzers) { - this(name, IndexVersion.current(), indexAnalyzers); + public Builder(String name, IndexAnalyzers indexAnalyzers, boolean isSyntheticSourceEnabledViaIndexMode) { + this(name, IndexVersion.current(), indexAnalyzers, isSyntheticSourceEnabledViaIndexMode); } - public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers indexAnalyzers) { + public Builder( + String name, + IndexVersion indexCreatedVersion, + IndexAnalyzers indexAnalyzers, + boolean isSyntheticSourceEnabledViaIndexMode + ) { super(name); + + // If synthetic source is used we need to either store this field + // to recreate the source or use keyword multi-fields for that. + // So if there are no suitable multi-fields we will default to + // storing the field without requiring users to explicitly set 'store'. + // + // If 'store' parameter was explicitly provided we'll reject the request. 
+ this.store = Parameter.storeParam( + m -> ((TextFieldMapper) m).store, + () -> isSyntheticSourceEnabledViaIndexMode && multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField() == false + ); this.indexCreatedVersion = indexCreatedVersion; this.analyzers = new TextParams.Analyzers( indexAnalyzers, @@ -296,6 +314,7 @@ public Builder(String name, IndexVersion indexCreatedVersion, IndexAnalyzers ind m -> (((TextFieldMapper) m).positionIncrementGap), indexCreatedVersion ); + this.isSyntheticSourceEnabledViaIndexMode = isSyntheticSourceEnabledViaIndexMode; } public Builder index(boolean index) { @@ -387,13 +406,9 @@ private static KeywordFieldMapper.KeywordFieldType syntheticSourceDelegate(Field if (fieldType.stored()) { return null; } - for (Mapper sub : multiFields) { - if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { - KeywordFieldMapper kwd = (KeywordFieldMapper) sub; - if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { - return kwd.fieldType(); - } - } + var kwd = getKeywordFieldMapperForSyntheticSource(multiFields); + if (kwd != null) { + return kwd.fieldType(); } return null; } @@ -483,7 +498,7 @@ public TextFieldMapper build(MapperBuilderContext context) { private static final IndexVersion MINIMUM_COMPATIBILITY_VERSION = IndexVersion.fromId(5000099); public static final TypeParser PARSER = new TypeParser( - (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers()), + (n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers(), c.getIndexSettings().getMode().isSyntheticSourceEnabled()), MINIMUM_COMPATIBILITY_VERSION ); @@ -1203,6 +1218,8 @@ public Query existsQuery(SearchExecutionContext context) { private final SubFieldInfo prefixFieldInfo; private final SubFieldInfo phraseFieldInfo; + private final boolean isSyntheticSourceEnabledViaIndexMode; + private TextFieldMapper( String simpleName, FieldType fieldType, @@ -1235,6 +1252,7 @@ private TextFieldMapper( 
this.indexPrefixes = builder.indexPrefixes.getValue(); this.freqFilter = builder.freqFilter.getValue(); this.fieldData = builder.fieldData.get(); + this.isSyntheticSourceEnabledViaIndexMode = builder.isSyntheticSourceEnabledViaIndexMode; } @Override @@ -1258,7 +1276,7 @@ public Map indexAnalyzers() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(simpleName(), indexCreatedVersion, indexAnalyzers).init(this); + return new Builder(simpleName(), indexCreatedVersion, indexAnalyzers, isSyntheticSourceEnabledViaIndexMode).init(this); } @Override @@ -1454,15 +1472,12 @@ protected void write(XContentBuilder b, Object value) throws IOException { } }; } - for (Mapper sub : this) { - if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { - KeywordFieldMapper kwd = (KeywordFieldMapper) sub; - if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { - return kwd.syntheticFieldLoader(simpleName()); - } - } + var kwd = getKeywordFieldMapperForSyntheticSource(this); + if (kwd != null) { + return kwd.syntheticFieldLoader(simpleName()); } + throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -1473,4 +1488,17 @@ protected void write(XContentBuilder b, Object value) throws IOException { ) ); } + + private static KeywordFieldMapper getKeywordFieldMapperForSyntheticSource(Iterable multiFields) { + for (Mapper sub : multiFields) { + if (sub.typeName().equals(KeywordFieldMapper.CONTENT_TYPE)) { + KeywordFieldMapper kwd = (KeywordFieldMapper) sub; + if (kwd.hasNormalizer() == false && (kwd.fieldType().hasDocValues() || kwd.fieldType().isStored())) { + return kwd; + } + } + } + + return null; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index fbfce6aab403f..6ab5d6d77d86d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -196,7 +196,11 @@ MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMap if (fieldMapping != null || allowUnmappedFields) { return fieldMapping; } else if (mapUnmappedFieldAsString) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + getIndexAnalyzers(), + getIndexSettings() != null && getIndexSettings().getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } else { throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 43628fe59daa3..683bfb19aac26 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -90,9 +90,11 @@ public > IFD getForField(String type, String field if (docValues) { fieldType = new KeywordFieldMapper.Builder(fieldName, IndexVersion.current()).build(context).fieldType(); } else { - fieldType = new TextFieldMapper.Builder(fieldName, createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + fieldType = new TextFieldMapper.Builder( + fieldName, + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); } } else if (type.equals("float")) { fieldType = new NumberFieldMapper.Builder( diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 
45ebfba265c2f..4df1961c123af 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -52,10 +52,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) - .fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0) - .build(builderContext) - .fieldType(); + MappedFieldType ft = new TextFieldMapper.Builder( + "high_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).fielddataFrequencyFilter(0, random.nextBoolean() ? 100 : 0.5d, 0).build(builderContext).fieldType(); IndexOrdinalsFieldData fieldData = searchExecutionContext.getForField(ft, MappedFieldType.FielddataOperation.SEARCH); for (LeafReaderContext context : contexts) { LeafOrdinalsFieldData loadDirect = fieldData.loadDirect(context); @@ -67,7 +68,11 @@ public void testFilterByFrequency() throws Exception { } { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("high_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "high_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, 201, 100) .build(builderContext) .fieldType(); @@ -82,7 +87,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true);// test # docs with value - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "med_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) .fieldType(); @@ -98,7 +107,11 @@ public void testFilterByFrequency() throws Exception { { indexService.clearCaches(false, true); - MappedFieldType ft = new TextFieldMapper.Builder("med_freq", createDefaultIndexAnalyzers()).fielddata(true) + MappedFieldType ft = new TextFieldMapper.Builder( + "med_freq", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true) .fielddataFrequencyFilter(random.nextBoolean() ? 
101 : 101d / 200.0d, Integer.MAX_VALUE, 101) .build(builderContext) .fieldType(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index bf9176de1b124..8c583fe3976fa 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -156,12 +156,16 @@ public void testClearField() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.root(false, false); - final MappedFieldType mapper1 = new TextFieldMapper.Builder("field_1", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); - final MappedFieldType mapper2 = new TextFieldMapper.Builder("field_2", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + final MappedFieldType mapper1 = new TextFieldMapper.Builder( + "field_1", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); + final MappedFieldType mapper2 = new TextFieldMapper.Builder( + "field_2", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("field_1", "thisisastring", Store.NO)); @@ -223,9 +227,11 @@ public void testFieldDataCacheListener() throws Exception { ); final MapperBuilderContext context = MapperBuilderContext.root(false, false); - final MappedFieldType mapper1 = new TextFieldMapper.Builder("s", createDefaultIndexAnalyzers()).fielddata(true) - .build(context) - .fieldType(); + final MappedFieldType mapper1 = new TextFieldMapper.Builder( 
+ "s", + createDefaultIndexAnalyzers(), + indexService.getIndexSettings().getMode().isSyntheticSourceEnabled() + ).fielddata(true).build(context).fieldType(); final IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(new KeywordAnalyzer())); Document doc = new Document(); doc.add(new StringField("s", "thisisastring", Store.NO)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java index 03716f8ad4497..9b66d0011ba69 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserContextTests.java @@ -20,9 +20,9 @@ public class DocumentParserContextTests extends ESTestCase { private final MapperBuilderContext root = MapperBuilderContext.root(false, false); public void testDynamicMapperSizeMultipleMappers() { - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(1, context.getNewFieldsSize()); - context.addDynamicMapper(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(2, context.getNewFieldsSize()); context.addDynamicRuntimeField(new TestRuntimeField("runtime1", "keyword")); assertEquals(3, context.getNewFieldsSize()); @@ -37,9 +37,9 @@ public void testDynamicMapperSizeSameFieldMultipleRuntimeFields() { } public void testDynamicMapperSizeSameFieldMultipleMappers() { - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), 
false).build(root)); assertEquals(1, context.getNewFieldsSize()); - context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers()).build(root)); + context.addDynamicMapper(new TextFieldMapper.Builder("foo", createDefaultIndexAnalyzers(), false).build(root)); assertEquals(1, context.getNewFieldsSize()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java new file mode 100644 index 0000000000000..01cbe496e6a3d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.script.ScriptCompiler; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +public class MultiFieldsTests extends ESTestCase { + + public void testMultiFieldsBuilderHasSyntheticSourceCompatibleKeywordField() { + var isStored = randomBoolean(); + var hasNormalizer = randomBoolean(); + + var builder = new FieldMapper.MultiFields.Builder(); + assertFalse(builder.hasSyntheticSourceCompatibleKeywordField()); + + var keywordFieldMapperBuilder = getKeywordFieldMapperBuilder(isStored, hasNormalizer); + builder.add(keywordFieldMapperBuilder); + + var expected = hasNormalizer == false; + assertEquals(expected, builder.hasSyntheticSourceCompatibleKeywordField()); + } + + public void testMultiFieldsBuilderHasSyntheticSourceCompatibleKeywordFieldDuringMerge() { + var isStored = randomBoolean(); + var hasNormalizer = randomBoolean(); + + var builder = new TextFieldMapper.Builder("text_field", createDefaultIndexAnalyzers(), false); + assertFalse(builder.multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField()); + + var keywordFieldMapperBuilder = getKeywordFieldMapperBuilder(isStored, hasNormalizer); + + var newField = new TextFieldMapper.Builder("text_field", createDefaultIndexAnalyzers(), false).addMultiField( + keywordFieldMapperBuilder + ).build(MapperBuilderContext.root(false, false)); + + builder.merge(newField, new FieldMapper.Conflicts("TextFieldMapper"), MapperMergeContext.root(false, false, Long.MAX_VALUE)); + + var expected = hasNormalizer == false; + assertEquals(expected, builder.multiFieldsBuilder.hasSyntheticSourceCompatibleKeywordField()); + } + + private KeywordFieldMapper.Builder getKeywordFieldMapperBuilder(boolean isStored, boolean hasNormalizer) { + var keywordFieldMapperBuilder = new KeywordFieldMapper.Builder( + 
"field", + IndexAnalyzers.of(Map.of(), Map.of("normalizer", Lucene.STANDARD_ANALYZER), Map.of()), + ScriptCompiler.NONE, + IndexVersion.current() + ); + if (isStored) { + keywordFieldMapperBuilder.stored(true); + if (randomBoolean()) { + keywordFieldMapperBuilder.docValues(false); + } + } + if (hasNormalizer) { + keywordFieldMapperBuilder.normalizer("normalizer"); + } + return keywordFieldMapperBuilder; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index e024f2fa7b1ea..3c4aca4d36284 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -27,10 +27,10 @@ private RootObjectMapper createMapping( rootBuilder.add(new ObjectMapper.Builder("disabled", Explicit.IMPLICIT_TRUE).enabled(disabledFieldEnabled)); ObjectMapper.Builder fooBuilder = new ObjectMapper.Builder("foo", Explicit.IMPLICIT_TRUE).enabled(fooFieldEnabled); if (includeBarField) { - fooBuilder.add(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers())); + fooBuilder.add(new TextFieldMapper.Builder("bar", createDefaultIndexAnalyzers(), false)); } if (includeBazField) { - fooBuilder.add(new TextFieldMapper.Builder("baz", createDefaultIndexAnalyzers())); + fooBuilder.add(new TextFieldMapper.Builder("baz", createDefaultIndexAnalyzers(), false)); } rootBuilder.add(fooBuilder); return rootBuilder.build(MapperBuilderContext.root(false, false)); @@ -366,7 +366,7 @@ private TextFieldMapper.Builder createTextKeywordMultiField(String name) { } private TextFieldMapper.Builder createTextKeywordMultiField(String name, String multiFieldName) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers(), false); 
builder.multiFieldsBuilder.add(new KeywordFieldMapper.Builder(multiFieldName, IndexVersion.current())); return builder; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 6472f09ce1be7..74b293ca7d6d6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -530,11 +530,11 @@ public void testSyntheticSourceDocValuesFieldWithout() throws IOException { public void testNestedObjectWithMultiFieldsgetTotalFieldsCount() { ObjectMapper.Builder mapperBuilder = new ObjectMapper.Builder("parent_size_1", Explicit.IMPLICIT_TRUE).add( new ObjectMapper.Builder("child_size_2", Explicit.IMPLICIT_TRUE).add( - new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers()).addMultiField( + new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers(), false).addMultiField( new KeywordFieldMapper.Builder("multi_field_size_4", IndexVersion.current()) ) .addMultiField( - new TextFieldMapper.Builder("grand_child_size_5", createDefaultIndexAnalyzers()).addMultiField( + new TextFieldMapper.Builder("grand_child_size_5", createDefaultIndexAnalyzers(), false).addMultiField( new KeywordFieldMapper.Builder("multi_field_of_multi_field_size_6", IndexVersion.current()) ) ) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java index 8cb3ecef4c35c..def8841045746 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldAnalyzerModeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.cluster.metadata.IndexMetadata; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisMode; @@ -67,6 +68,9 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { fieldNode.put("analyzer", "my_analyzer"); MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), Settings.EMPTY) + ); // check AnalysisMode.ALL works Map analyzers = defaultAnalyzers(); @@ -102,6 +106,12 @@ public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { } MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings( + IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), + Settings.EMPTY + ) + ); // check AnalysisMode.ALL and AnalysisMode.SEARCH_TIME works Map analyzers = defaultAnalyzers(); @@ -143,6 +153,9 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { fieldNode.put("analyzer", "my_analyzer"); MappingParserContext parserContext = mock(MappingParserContext.class); when(parserContext.indexVersionCreated()).thenReturn(IndexVersion.current()); + when(parserContext.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(), Settings.EMPTY) + ); // check that "analyzer" set to AnalysisMode.INDEX_TIME is blocked if there is no search analyzer AnalysisMode mode = AnalysisMode.INDEX_TIME; diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index f92867d1ce461..1c5ae3baca827 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -44,9 +44,11 @@ import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; @@ -249,6 +251,64 @@ public void testDefaults() throws IOException { assertEquals(DocValuesType.NONE, fieldType.docValuesType()); } + public void testStoreParameterDefaults() throws IOException { + var timeSeriesIndexMode = randomBoolean(); + var isStored = randomBoolean(); + var hasKeywordFieldForSyntheticSource = randomBoolean(); + + var indexSettingsBuilder = getIndexSettingsBuilder(); + if (timeSeriesIndexMode) { + indexSettingsBuilder.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES) + .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dimension") + .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2000-01-08T23:40:53.384Z") + .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z"); + } + var indexSettings = indexSettingsBuilder.build(); + + var mapping = mapping(b -> { + b.startObject("field"); + b.field("type", "text"); + if (isStored) { + b.field("store", isStored); + } + if (hasKeywordFieldForSyntheticSource) { + b.startObject("fields"); + b.startObject("keyword"); + b.field("type", "keyword"); + 
b.endObject(); + b.endObject(); + } + b.endObject(); + + if (timeSeriesIndexMode) { + b.startObject("@timestamp"); + b.field("type", "date"); + b.endObject(); + b.startObject("dimension"); + b.field("type", "keyword"); + b.field("time_series_dimension", "true"); + b.endObject(); + } + }); + DocumentMapper mapper = createMapperService(getVersion(), indexSettings, () -> true, mapping).documentMapper(); + + var source = source(TimeSeriesRoutingHashFieldMapper.DUMMY_ENCODED_VALUE, b -> { + b.field("field", "1234"); + if (timeSeriesIndexMode) { + b.field("@timestamp", randomMillisUpToYear9999()); + b.field("dimension", "dimension1"); + } + }, null); + ParsedDocument doc = mapper.parse(source); + List fields = doc.rootDoc().getFields("field"); + IndexableFieldType fieldType = fields.get(0).fieldType(); + if (isStored || (timeSeriesIndexMode && hasKeywordFieldForSyntheticSource == false)) { + assertTrue(fieldType.stored()); + } else { + assertFalse(fieldType.stored()); + } + } + public void testBWCSerialization() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { b.field("type", "text"); @@ -1138,7 +1198,8 @@ public SyntheticSourceExample example(int maxValues) { delegate.expectedForSyntheticSource(), delegate.expectedForBlockLoader(), b -> { - b.field("type", "text").field("store", true); + b.field("type", "text"); + b.field("store", true); if (indexText == false) { b.field("index", false); } @@ -1196,6 +1257,17 @@ public List invalidExample() throws IOException { b.endObject(); } b.endObject(); + }), + new SyntheticSourceInvalidExample(err, b -> { + b.field("type", "text"); + b.startObject("fields"); + { + b.startObject("kwd"); + b.field("type", "keyword"); + b.field("doc_values", "false"); + b.endObject(); + } + b.endObject(); }) ); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java 
b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index c521ab5e047aa..4e4f5c9c0ddfa 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -317,7 +317,11 @@ public void testBuildSearchContextHighlight() throws IOException { ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 0ade522ae1ffa..7113117a4d7fa 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -160,7 +160,11 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; @@ -222,7 +226,11 @@ public void testRewritingKeepsSettings() throws IOException { ) { @Override public MappedFieldType getFieldType(String name) { - TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, 
createDefaultIndexAnalyzers()); + TextFieldMapper.Builder builder = new TextFieldMapper.Builder( + name, + createDefaultIndexAnalyzers(), + idxSettings.getMode().isSyntheticSourceEnabled() + ); return builder.build(MapperBuilderContext.root(false, false)).fieldType(); } }; From 2f0a7b8629a9892b1ef0b8f40bd02832558c8b84 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Tue, 26 Mar 2024 23:27:36 -0400 Subject: [PATCH 193/214] [ES|QL] Refactor data type conversions between String/UnsignedLong and other data types (#106628) * refactor data type conversions related to string and unsignedlong in esql. --- .../xpack/esql/qa/rest/RestEsqlTestCase.java | 2 +- .../src/main/resources/floats.csv-spec | 4 +- .../src/main/resources/ints.csv-spec | 12 +- .../src/main/resources/row.csv-spec | 14 +- .../convert/ToDoubleFromStringEvaluator.java | 8 +- .../convert/ToIntegerFromStringEvaluator.java | 7 +- .../convert/ToLongFromStringEvaluator.java | 8 +- .../xpack/esql/action/PositionToXContent.java | 22 +-- .../xpack/esql/action/ResponseValueUtils.java | 39 +++-- .../function/scalar/convert/ToBoolean.java | 9 +- .../scalar/convert/ToCartesianPoint.java | 4 +- .../scalar/convert/ToCartesianShape.java | 4 +- .../function/scalar/convert/ToDouble.java | 10 +- .../function/scalar/convert/ToGeoPoint.java | 4 +- .../function/scalar/convert/ToGeoShape.java | 4 +- .../function/scalar/convert/ToIP.java | 4 +- .../function/scalar/convert/ToInteger.java | 23 +-- .../function/scalar/convert/ToLong.java | 19 +-- .../function/scalar/convert/ToString.java | 32 ++-- .../scalar/convert/ToUnsignedLong.java | 20 +-- .../function/scalar/convert/ToVersion.java | 4 +- .../expression/function/scalar/math/Cast.java | 12 +- .../function/scalar/math/Log10.java | 3 +- .../function/scalar/math/Round.java | 9 +- .../expression/function/scalar/math/Sqrt.java | 4 +- .../function/scalar/multivalue/MvAvg.java | 2 +- .../function/scalar/multivalue/MvMedian.java | 16 +- 
.../function/scalar/multivalue/MvSlice.java | 5 +- .../predicate/operator/arithmetic/Div.java | 4 +- .../predicate/operator/arithmetic/Mod.java | 4 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 11 +- .../xpack/esql/parser/ExpressionBuilder.java | 10 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 3 +- .../planner/EsqlExpressionTranslators.java | 9 +- .../esql/planner/LocalExecutionPlanner.java | 3 +- .../esql/type/EsqlDataTypeConverter.java | 161 +++++++++++++++++- .../scalar/convert/ToDoubleTests.java | 7 +- .../scalar/convert/ToIntegerTests.java | 10 +- .../function/scalar/convert/ToLongTests.java | 10 +- .../function/scalar/math/Log10Tests.java | 6 +- .../function/scalar/math/SqrtTests.java | 4 +- .../scalar/multivalue/MvAvgTests.java | 3 +- 42 files changed, 343 insertions(+), 206 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index cccd1a3f8854b..2d0a39da5a8b4 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -422,7 +422,7 @@ public void testWarningHeadersOnFailedConversions() throws IOException { for (int i = 1; i <= expectedWarnings; i++) { assertThat( warnings.get(i), - containsString("java.lang.NumberFormatException: For input string: \\\"keyword" + (2 * i - 1) + "\\\"") + containsString("org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [keyword" + (2 * i - 1) + "]") ); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 75011388a9f5a..9c343083275cd 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -55,14 +55,14 @@ emp_no:integer |hire_date:date |hire_double:double 10003 |1986-08-28T00:00:00.000Z|5.255712E11 ; -convertFromString +convertFromString#[skip:-8.13.99, reason:warning changed in 8.14] // tag::to_double-str[] ROW str1 = "5.20128E11", str2 = "foo" | EVAL dbl = TO_DOUBLE("520128000000"), dbl1 = TO_DOUBLE(str1), dbl2 = TO_DOUBLE(str2) // end::to_double-str[] ; warning:Line 2:72: evaluation of [TO_DOUBLE(str2)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 2:72: java.lang.NumberFormatException: For input string: \"foo\" +warning:Line 2:72: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [foo] // tag::to_double-str-result[] str1:keyword |str2:keyword |dbl:double |dbl1:double |dbl2:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 7a64c9a87e0c9..8657602e7b16f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -153,14 +153,14 @@ tf:boolean |t2l:long |f2l:long |tf2l:long [true, false] |1 |0 |[1, 0] ; -convertStringToLong +convertStringToLong#[skip:-8.13.99, reason:warning changed in 8.14] // tag::to_long-str[] ROW str1 = "2147483648", str2 = "2147483648.2", str3 = "foo" | EVAL long1 = TO_LONG(str1), long2 = TO_LONG(str2), long3 = TO_LONG(str3) // end::to_long-str[] ; warning:Line 2:62: evaluation of [TO_LONG(str3)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 2:62: java.lang.NumberFormatException: For input string: \"foo\" +warning:Line 2:62: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [foo] // tag::to_long-str-result[] @@ -238,16 +238,16 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer 2147483647 |2147483646.2 |2147483647 |2147483646 ; -convertStringToIntFail +convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] required_feature: esql.mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:79: java.lang.NumberFormatException: For input string: \"2147483647.2\" +warning:Line 1:79: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [2147483647.2] warning:Line 1:102: evaluation of [to_integer(str2)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:102: java.lang.NumberFormatException: For input string: \"2147483648\" +warning:Line 1:102: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [2147483648] warning:Line 1:126: evaluation of [to_integer(non)] failed, treating result as null. Only first 20 failures recorded. 
-warning:Line 1:126: java.lang.NumberFormatException: For input string: \"no number\" +warning:Line 1:126: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [no number] str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer |noi:integer 2147483647.2 |2147483648 |no number |null |null |null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec index 4e5df6c535be7..3f441c94967d5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/row.csv-spec @@ -289,30 +289,30 @@ a:integer |b:integer |c:integer // end::in-with-expressions-result[] ; -convertMvToMvDifferentCardinality +convertMvToMvDifferentCardinality#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["1", "2", "three"] | eval ints = to_int(strings); warning:Line 1:49: evaluation of [to_int(strings)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:49: java.lang.NumberFormatException: For input string: \"three\" +warning:Line 1:49: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [three] strings:keyword |ints:integer [1, 2, three] |[1, 2] ; -convertMvToSv +convertMvToSv#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["1", "two"] | eval ints = to_int(strings); warning:Line 1:42: evaluation of [to_int(strings)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:42: java.lang.NumberFormatException: For input string: \"two\" +warning:Line 1:42: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [two] strings:keyword |ints:integer [1, two] |1 ; -convertMvToNull +convertMvToNull#[skip:-8.13.99, reason:warning changed in 8.14] row strings = ["one", "two"] | eval ints = to_int(strings); warning:Line 1:44: evaluation of [to_int(strings)] failed, treating result as null. 
Only first 20 failures recorded. -warning:Line 1:44: java.lang.NumberFormatException: For input string: \"one\" -warning:Line 1:44: java.lang.NumberFormatException: For input string: \"two\" +warning:Line 1:44: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [one] +warning:Line 1:44: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [two] strings:keyword |ints:integer [one, two] |null diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index b1fc80b9260ad..209b15ef21a2f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -4,7 +4,6 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -15,6 +14,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantDoubleBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendDouble(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -84,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendDouble(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index 600fa293394f9..ef91bf890cd23 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java 
@@ -4,7 +4,6 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -41,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -50,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p, scratchPad)); - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -85,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java index e0eca6b6bcbff..0d7a2cb9d7459 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java @@ -4,7 +4,6 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import java.lang.NumberFormatException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -15,6 +14,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +40,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +49,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -84,7 +84,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java index 8770d313ac2fe..5488efda7834f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/PositionToXContent.java @@ -17,21 +17,20 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.lucene.UnsupportedValueSource; -import 
org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; -import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; abstract class PositionToXContent { protected final Block block; @@ -109,7 +108,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { BytesRef val = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch); - return builder.value(DocValueFormat.IP.format(val)); + return builder.value(ipToString(val)); } }; case "date" -> new PositionToXContent(block) { @@ -120,18 +119,11 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(dateTimeToString(longVal)); } }; - case "geo_point", "geo_shape" -> new PositionToXContent(block) { + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws 
IOException { - return builder.value(GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); - } - }; - case "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { - @Override - protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) - throws IOException { - return builder.value(CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); + return builder.value(spatialToString(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; case "boolean" -> new PositionToXContent(block) { @@ -146,7 +138,7 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { BytesRef val = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch); - return builder.value(new Version(val).toString()); + return builder.value(versionToString(val)); } }; case "null" -> new PositionToXContent(block) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 8691f2b142d87..f467512fd6c0b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -21,7 +21,6 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.UnsupportedValueSource; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -30,7 +29,6 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import 
org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; import java.io.UncheckedIOException; @@ -41,11 +39,14 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; /** * Collection of static utility methods for helping transform response data between pages and values. 
@@ -128,16 +129,17 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef case "keyword", "text" -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); case "ip" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); - yield DocValueFormat.IP.format(val); + yield ipToString(val); } case "date" -> { long longVal = ((LongBlock) block).getLong(offset); yield dateTimeToString(longVal); } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); - case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point", "geo_shape" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); - case "cartesian_point", "cartesian_shape" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "version" -> versionToString(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> spatialToString( + ((BytesRefBlock) block).getBytesRef(offset, scratch) + ); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -169,21 +171,23 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li var builder = results.get(c); var value = row.get(c); switch (dataTypes.get(c)) { - case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong(asLongUnsigned(((Number) value).longValue())); + case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong( + longToUnsignedLong(((Number) value).longValue(), true) + ); case "long" -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); case "integer" -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); case "double" -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); case "keyword", "text", "unsupported" -> ((BytesRefBlock.Builder) 
builder).appendBytesRef( new BytesRef(value.toString()) ); - case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(parseIP(value.toString())); + case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToIP(value.toString())); case "date" -> { long longVal = dateTimeToLong(value.toString()); ((LongBlock.Builder) builder).appendLong(longVal); } case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); case "null" -> builder.appendNull(); - case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(value.toString()).toBytesRef()); + case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(stringToVersion(new BytesRef(value.toString()))); case "_source" -> { @SuppressWarnings("unchecked") Map o = (Map) value; @@ -196,14 +200,9 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li throw new UncheckedIOException(e); } } - case "geo_point", "geo_shape" -> { - // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = GEO.wktToWkb(value.toString()); - ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); - } - case "cartesian_point", "cartesian_shape" -> { + case "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here - BytesRef wkb = CARTESIAN.wktToWkb(value.toString()); + BytesRef wkb = stringToSpatial(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index b5f33184d1395..cd9fcb0390937 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -16,10 +16,11 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import java.math.BigInteger; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToBoolean; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBoolean; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; @@ -27,7 +28,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToBoolean extends AbstractConvertFunction { @@ -71,7 +71,7 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString") static boolean fromKeyword(BytesRef keyword) { - return Boolean.parseBoolean(keyword.utf8ToString()); + return stringToBoolean(keyword.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble") @@ -86,8 +86,7 @@ static boolean fromLong(long l) { @ConvertEvaluator(extraName = "FromUnsignedLong") static boolean fromUnsignedLong(long ul) { - Number n = unsignedLongAsNumber(ul); - return n instanceof BigInteger || n.longValue() != 0; + return unsignedLongToBoolean(ul); } @ConvertEvaluator(extraName = "FromInt") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index 7a67681018727..4ef666b03dfb3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; public class ToCartesianPoint extends AbstractConvertFunction { @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java index 0e52e0870a354..6bd57a1dd2641 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -19,11 +19,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; public class ToCartesianShape extends AbstractConvertFunction { @@ -64,6 +64,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return CARTESIAN.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index d74ba553fc9d0..005f27abc2a56 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,6 +20,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToDouble; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -27,7 
+30,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToDouble extends AbstractConvertFunction { @@ -78,14 +80,14 @@ static double fromBoolean(boolean bool) { return bool ? 1d : 0d; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static double fromKeyword(BytesRef in) { - return Double.parseDouble(in.utf8ToString()); + return stringToDouble(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromUnsignedLong") static double fromUnsignedLong(long l) { - return unsignedLongAsNumber(l).doubleValue(); + return unsignedLongToDouble(l); } @ConvertEvaluator(extraName = "FromLong") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index acfaa7c3964c2..96e366be25e44 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -19,10 +19,10 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToGeoPoint extends AbstractConvertFunction { @@ -59,6 +59,6 
@@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java index e557735e6dfe1..d8381547b1651 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -19,11 +19,11 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToGeoShape extends AbstractConvertFunction { @@ -61,6 +61,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef in) { - return GEO.wktToWkb(in.utf8ToString()); + return stringToSpatial(in.utf8ToString()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 856f903e278c5..acb9ef7b46d63 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -19,7 +19,7 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_IP; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) static BytesRef fromKeyword(BytesRef asString) { - return (BytesRef) STRING_TO_IP.convert(asString); + return stringToIP(asString); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 16b2d45c7ae26..32d83de8da846 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -20,6 +20,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToInt; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -29,7 +31,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static 
org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToInteger extends AbstractConvertFunction { @@ -80,18 +81,9 @@ static int fromBoolean(boolean bool) { return bool ? 1 : 0; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static int fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - try { - return Integer.parseInt(asString); - } catch (NumberFormatException nfe) { - try { - return fromDouble(Double.parseDouble(asString)); - } catch (Exception e) { - throw nfe; - } - } + return stringToInt(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) @@ -101,12 +93,7 @@ static int fromDouble(double dbl) { @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) static int fromUnsignedLong(long ul) { - Number n = unsignedLongAsNumber(ul); - int i = n.intValue(); - if (i != n.longValue()) { - throw new InvalidArgumentException("[{}] out of [integer] range", n); - } - return i; + return unsignedLongToInt(ul); } @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 3139b2ee740b4..b8dea5d8b42ea 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -20,8 +20,9 
@@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -30,7 +31,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToLong extends AbstractConvertFunction { @@ -81,18 +81,9 @@ static long fromBoolean(boolean bool) { return bool ? 1L : 0L; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class }) static long fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - try { - return Long.parseLong(asString); - } catch (NumberFormatException nfe) { - try { - return fromDouble(Double.parseDouble(asString)); - } catch (Exception e) { - throw nfe; - } - } + return stringToLong(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) @@ -102,7 +93,7 @@ static long fromDouble(double dbl) { @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) static long fromUnsignedLong(long ul) { - return safeToLong(unsignedLongAsNumber(ul)); + return unsignedLongToLong(ul); } @ConvertEvaluator(extraName = "FromInt") diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 33663534bf6cd..a15d610f2b517 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -9,7 +9,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -17,12 +16,16 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.versionfield.Version; import java.util.List; import java.util.Map; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.numericBooleanToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.spatialToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; @@ -37,9 +40,6 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static 
org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToString extends AbstractConvertFunction implements EvaluatorMapper { @@ -107,12 +107,12 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromBoolean") static BytesRef fromBoolean(boolean bool) { - return new BytesRef(String.valueOf(bool)); + return numericBooleanToString(bool); } @ConvertEvaluator(extraName = "FromIP") static BytesRef fromIP(BytesRef ip) { - return new BytesRef(DocValueFormat.IP.format(ip)); + return new BytesRef(ipToString(ip)); } @ConvertEvaluator(extraName = "FromDatetime") @@ -122,46 +122,46 @@ static BytesRef fromDatetime(long datetime) { @ConvertEvaluator(extraName = "FromDouble") static BytesRef fromDouble(double dbl) { - return new BytesRef(String.valueOf(dbl)); + return numericBooleanToString(dbl); } @ConvertEvaluator(extraName = "FromLong") static BytesRef fromDouble(long lng) { - return new BytesRef(String.valueOf(lng)); + return numericBooleanToString(lng); } @ConvertEvaluator(extraName = "FromInt") static BytesRef fromDouble(int integer) { - return new BytesRef(String.valueOf(integer)); + return numericBooleanToString(integer); } @ConvertEvaluator(extraName = "FromVersion") static BytesRef fromVersion(BytesRef version) { - return new BytesRef(new Version(version).toString()); + return new BytesRef(versionToString(version)); } @ConvertEvaluator(extraName = "FromUnsignedLong") static BytesRef fromUnsignedLong(long lng) { - return new BytesRef(unsignedLongAsNumber(lng).toString()); + return unsignedLongToString(lng); } @ConvertEvaluator(extraName = "FromGeoPoint") static BytesRef fromGeoPoint(BytesRef wkb) { - return new BytesRef(GEO.wkbToWkt(wkb)); + return 
new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromCartesianPoint") static BytesRef fromCartesianPoint(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromCartesianShape") static BytesRef fromCartesianShape(BytesRef wkb) { - return new BytesRef(GEO.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } @ConvertEvaluator(extraName = "FromGeoShape") static BytesRef fromGeoShape(BytesRef wkb) { - return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + return new BytesRef(spatialToString(wkb)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 31bbcd4bf302f..8127fd2103051 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -20,7 +20,11 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.booleanToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.doubleToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -29,9 +33,6 @@ import static 
org.elasticsearch.xpack.ql.type.DataTypes.LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; public class ToUnsignedLong extends AbstractConvertFunction { @@ -79,27 +80,26 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromBoolean") static long fromBoolean(boolean bool) { - return bool ? ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; + return booleanToUnsignedLong(bool); } @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static long fromKeyword(BytesRef in) { - String asString = in.utf8ToString(); - return asLongUnsigned(safeToUnsignedLong(asString)); + return stringToUnsignedLong(in.utf8ToString()); } @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static long fromDouble(double dbl) { - return asLongUnsigned(safeToUnsignedLong(dbl)); + return doubleToUnsignedLong(dbl); } @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static long fromLong(long lng) { - return asLongUnsigned(safeToUnsignedLong(lng)); + return longToUnsignedLong(lng, false); } @ConvertEvaluator(extraName = "FromInt", warnExceptions = { InvalidArgumentException.class }) static long fromInt(int i) { - return fromLong(i); + return intToUnsignedLong(i); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index ca3012871fced..b0e0b385ee3c4 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -15,11 +15,11 @@ import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.versionfield.Version; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; @@ -59,6 +59,6 @@ protected NodeInfo info() { @ConvertEvaluator(extraName = "FromString") static BytesRef fromKeyword(BytesRef asString) { - return new Version(asString.utf8ToString()).toBytesRef(); + return stringToVersion(asString); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java index 1c9f42de2f640..60bb904ab4849 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java @@ -11,12 +11,13 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import static 
org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongToDouble; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.intToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; public class Cast { /** @@ -84,15 +85,12 @@ static double castUnsignedLongToDouble(long v) { @Evaluator(extraName = "IntToUnsignedLong") static long castIntToUnsignedLong(int v) { - return castLongToUnsignedLong(v); + return intToUnsignedLong(v); } @Evaluator(extraName = "LongToUnsignedLong") // TODO: catch-to-null in evaluator? static long castLongToUnsignedLong(long v) { - if (v < 0) { - throw new InvalidArgumentException("[" + v + "] out of [unsigned_long] range"); - } - return v; + return longToUnsignedLong(v, false); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 1b41d2d407bd5..46df37c685cf7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -23,6 +23,7 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -74,7 +75,7 @@ static double processUnsignedLong(long val) { if (val == NumericUtils.ZERO_AS_UNSIGNED_LONG) { throw new ArithmeticException("Log of non-positive number"); } - return Math.log10(NumericUtils.unsignedLongToDouble(val)); + return Math.log10(unsignedLongToDouble(val)); } 
@Evaluator(extraName = "Int", warnExceptions = ArithmeticException.class) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 758b56093d40f..2edb1c7c3a159 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -29,12 +29,12 @@ import java.util.function.BiFunction; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isInteger; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asUnsignedLong; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class Round extends EsqlScalarFunction implements OptionalArgument { @@ -99,10 +99,9 @@ static long processUnsignedLong(long val, long decimals) { Number ul = unsignedLongAsNumber(val); if (ul instanceof BigInteger bi) { BigInteger rounded = Maths.round(bi, decimals); - BigInteger unsignedLong = asUnsignedLong(rounded); - return asLongUnsigned(unsignedLong); + return bigIntegerToUnsignedLong(rounded); } else { - return asLongUnsigned(Maths.round(ul.longValue(), decimals)); + return longToUnsignedLong(Maths.round(ul.longValue(), decimals), false); } } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 542f667c61b95..17882f1baa81d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -18,11 +18,11 @@ import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.NumericUtils; import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; @@ -71,7 +71,7 @@ static double process(long val) { @Evaluator(extraName = "UnsignedLong") static double processUnsignedLong(long val) { - return Math.sqrt(NumericUtils.unsignedLongToDouble(val)); + return Math.sqrt(unsignedLongToDouble(val)); } @Evaluator(extraName = "Int", warnExceptions = ArithmeticException.class) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 0b4652b305741..5265d5bcad660 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -23,9 +23,9 @@ import java.util.List; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongToDouble; /** * Reduce a multivalued field to a single valued field containing the average value. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index 7c3cb2c1c2d1d..8f65d15134cfa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -26,10 +26,10 @@ import java.util.Arrays; import java.util.List; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBigInteger; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; -import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; /** * Reduce a multivalued field to a single valued field containing the average value. 
@@ -156,9 +156,9 @@ static long finishUnsignedLong(Longs longs) { Arrays.sort(longs.values, 0, longs.count); int middle = longs.count / 2; longs.count = 0; - BigInteger a = unsignedLongAsBigInteger(longs.values[middle - 1]); - BigInteger b = unsignedLongAsBigInteger(longs.values[middle]); - return asLongUnsigned(a.add(b).shiftRight(1).longValue()); + BigInteger a = unsignedLongToBigInteger(longs.values[middle - 1]); + BigInteger b = unsignedLongToBigInteger(longs.values[middle]); + return bigIntegerToUnsignedLong(a.add(b).shiftRight(1)); } /** @@ -169,9 +169,9 @@ static long ascendingUnsignedLong(LongBlock values, int firstValue, int count) { if (count % 2 == 1) { return values.getLong(middle); } - BigInteger a = unsignedLongAsBigInteger(values.getLong(middle - 1)); - BigInteger b = unsignedLongAsBigInteger(values.getLong(middle)); - return asLongUnsigned(a.add(b).shiftRight(1).longValue()); + BigInteger a = unsignedLongToBigInteger(values.getLong(middle - 1)); + BigInteger b = unsignedLongToBigInteger(values.getLong(middle)); + return bigIntegerToUnsignedLong(a.add(b).shiftRight(1)); } static class Ints { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index 29ec231f08555..e054fc7e00e24 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -35,6 +35,7 @@ import java.util.Objects; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.THIRD; @@ -129,8 +130,8 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator( Function toEvaluator ) { if (start.foldable() && end.foldable()) { - int startOffset = Integer.parseInt(String.valueOf(start.fold())); - int endOffset = Integer.parseInt(String.valueOf(end.fold())); + int startOffset = stringToInt(String.valueOf(start.fold())); + int endOffset = stringToInt(String.valueOf(end.fold())); checkStartEnd(startOffset, endOffset); } return switch (PlannerUtils.toElementType(field.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index 42fd526cb3b99..170e3de6e4209 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -15,7 +15,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation.OperationSymbol.DIV; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; public class Div extends EsqlArithmeticOperation implements BinaryComparisonInversible { @@ -73,7 +73,7 @@ static long processLongs(long lhs, long rhs) { @Evaluator(extraName = "UnsignedLongs", warnExceptions = { ArithmeticException.class }) static long processUnsignedLongs(long lhs, long rhs) { - return asLongUnsigned(Long.divideUnsigned(asLongUnsigned(lhs), asLongUnsigned(rhs))); + return longToUnsignedLong(Long.divideUnsigned(longToUnsignedLong(lhs, true), longToUnsignedLong(rhs, true)), true); } @Evaluator(extraName = "Doubles") 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java index 41a396819a7f2..bc1ad8fcb5f94 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java @@ -13,7 +13,7 @@ import org.elasticsearch.xpack.ql.tree.Source; import static org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation.OperationSymbol.MOD; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; public class Mod extends EsqlArithmeticOperation { @@ -52,7 +52,7 @@ static long processLongs(long lhs, long rhs) { @Evaluator(extraName = "UnsignedLongs", warnExceptions = { ArithmeticException.class }) static long processUnsignedLongs(long lhs, long rhs) { - return asLongUnsigned(Long.remainderUnsigned(asLongUnsigned(lhs), asLongUnsigned(rhs))); + return longToUnsignedLong(Long.remainderUnsigned(longToUnsignedLong(lhs, true), longToUnsignedLong(rhs, true)), true); } @Evaluator(extraName = "Doubles") diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index c20763e08f343..d198d740029e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -209,6 +209,7 @@ import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.Entry.of; import static 
org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader.readerFromPlanReader; import static org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanWriter.writerFromPlanWriter; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; @@ -996,7 +997,7 @@ static void writeFieldAttribute(PlanStreamOutput out, FieldAttribute fileAttribu out.writeNamed(EsField.class, fileAttribute.field()); out.writeOptionalString(fileAttribute.qualifier()); out.writeEnum(fileAttribute.nullable()); - out.writeLong(Long.parseLong(fileAttribute.id().toString())); + out.writeLong(stringToLong(fileAttribute.id().toString())); out.writeBoolean(fileAttribute.synthetic()); } @@ -1018,7 +1019,7 @@ static void writeReferenceAttr(PlanStreamOutput out, ReferenceAttribute referenc out.writeString(referenceAttribute.dataType().typeName()); out.writeOptionalString(referenceAttribute.qualifier()); out.writeEnum(referenceAttribute.nullable()); - out.writeLong(Long.parseLong(referenceAttribute.id().toString())); + out.writeLong(stringToLong(referenceAttribute.id().toString())); out.writeBoolean(referenceAttribute.synthetic()); } @@ -1041,7 +1042,7 @@ static void writeMetadataAttr(PlanStreamOutput out, MetadataAttribute metadataAt out.writeString(metadataAttribute.dataType().typeName()); out.writeOptionalString(metadataAttribute.qualifier()); out.writeEnum(metadataAttribute.nullable()); - out.writeLong(Long.parseLong(metadataAttribute.id().toString())); + out.writeLong(stringToLong(metadataAttribute.id().toString())); out.writeBoolean(metadataAttribute.synthetic()); out.writeBoolean(metadataAttribute.searchable()); } @@ -1061,7 +1062,7 @@ static void writeUnsupportedAttr(PlanStreamOutput out, UnsupportedAttribute unsu 
out.writeString(unsupportedAttribute.name()); writeUnsupportedEsField(out, unsupportedAttribute.field()); out.writeOptionalString(unsupportedAttribute.hasCustomMessage() ? unsupportedAttribute.unresolvedMessage() : null); - out.writeLong(Long.parseLong(unsupportedAttribute.id().toString())); + out.writeLong(stringToLong(unsupportedAttribute.id().toString())); } // -- EsFields @@ -1735,7 +1736,7 @@ static void writeAlias(PlanStreamOutput out, Alias alias) throws IOException { out.writeString(alias.name()); out.writeOptionalString(alias.qualifier()); out.writeExpression(alias.child()); - out.writeLong(Long.parseLong(alias.id().toString())); + out.writeLong(stringToLong(alias.id().toString())); out.writeBoolean(alias.synthetic()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 8a13c80c0ea68..223d318a64324 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -73,7 +73,9 @@ import java.util.function.BiFunction; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.parseTemporalAmout; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIntegral; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.TIME_DURATION; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; @@ -124,11 +126,11 @@ public Literal visitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { Number number; try { - number = StringUtils.parseIntegral(text); + number = stringToIntegral(text); } catch 
(InvalidArgumentException siae) { // if it's too large, then quietly try to parse as a float instead try { - return new Literal(source, StringUtils.parseDouble(text), DataTypes.DOUBLE); + return new Literal(source, EsqlDataTypeConverter.stringToDouble(text), DataTypes.DOUBLE); } catch (InvalidArgumentException ignored) {} throw new ParsingException(source, siae.getMessage()); @@ -161,7 +163,9 @@ public Object visitNumericArrayLiteral(EsqlBaseParser.NumericArrayLiteralContext source, mapNumbers( numbers, - (no, dt) -> dt == DataTypes.UNSIGNED_LONG ? no.longValue() : asLongUnsigned(BigInteger.valueOf(no.longValue())) + (no, dt) -> dt == DataTypes.UNSIGNED_LONG + ? no.longValue() + : bigIntegerToUnsignedLong(BigInteger.valueOf(no.longValue())) ), DataTypes.UNSIGNED_LONG ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 5ae0584d28a44..64ce1633e8772 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -64,6 +64,7 @@ import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.plan.logical.Enrich.Mode; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.ql.parser.ParserUtils.visitList; @@ -250,7 +251,7 @@ public PlanFactory visitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { @Override public PlanFactory visitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { Source source = source(ctx); - int limit = Integer.parseInt(ctx.INTEGER_LITERAL().getText()); + int limit = 
stringToInt(ctx.INTEGER_LITERAL().getText()); return input -> new Limit(source, new Literal(source, limit, DataTypes.INTEGER), input); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 37e87a456a4d0..0afa6179fd3c8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -59,6 +58,8 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.HOUR_MINUTE_SECOND; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; @@ -191,14 +192,14 @@ static Query translate(BinaryComparison bc, TranslatorHandler handler) { format = formatter.pattern(); isDateLiteralComparison = true; } else if (attribute.dataType() == IP && value instanceof BytesRef bytesRef) { - value = DocValueFormat.IP.format(bytesRef); + value = ipToString(bytesRef); } else if 
(attribute.dataType() == VERSION) { // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not // encoded) representation of the version as it'll do the encoding itself. if (value instanceof BytesRef bytesRef) { - value = new Version(bytesRef).toString(); + value = versionToString(bytesRef); } else if (value instanceof Version version) { - value = version.toString(); + value = versionToString(version); } } else if (attribute.dataType() == UNSIGNED_LONG && value instanceof Long ul) { value = unsignedLongAsNumber(ul); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 34a31ac7e656d..aad80b6c673ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -99,6 +99,7 @@ import static java.util.stream.Collectors.joining; import static org.elasticsearch.compute.operator.LimitOperator.Factory; import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToInt; /** * The local execution planner takes a plan (represented as PlanNode tree / digraph) as input and creates the corresponding @@ -366,7 +367,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte int limit; if (topNExec.limit() instanceof Literal literal) { - limit = Integer.parseInt(literal.value().toString()); + limit = stringToInt(literal.value().toString()); } else { throw new EsqlIllegalArgumentException("limit only supported with literal values"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java index 51f96196f29d7..82e7fc2e9fc88 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypeConverter.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.time.DateFormatter; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; @@ -19,8 +20,12 @@ import org.elasticsearch.xpack.ql.type.Converter; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypeConverter; +import org.elasticsearch.xpack.ql.util.NumericUtils; +import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; +import java.math.BigInteger; import java.time.Duration; import java.time.Instant; import java.time.Period; @@ -30,12 +35,19 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.ql.type.DataTypes.NULL; import static org.elasticsearch.xpack.ql.type.DataTypes.isPrimitive; import static org.elasticsearch.xpack.ql.type.DataTypes.isString; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; +import static 
org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asUnsignedLong; +import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.UNSPECIFIED; public class EsqlDataTypeConverter { @@ -161,10 +173,13 @@ public static TemporalAmount parseTemporalAmout(Number value, String qualifier, }; } - private static ChronoField stringToChrono(Object value) { + /** + * The following conversions are used by DateExtract. + */ + private static ChronoField stringToChrono(Object field) { ChronoField chronoField = null; try { - BytesRef br = BytesRefs.toBytesRef(value); + BytesRef br = BytesRefs.toBytesRef(field); chronoField = ChronoField.valueOf(br.utf8ToString().toUpperCase(Locale.ROOT)); } catch (Exception e) { return null; @@ -172,10 +187,6 @@ private static ChronoField stringToChrono(Object value) { return chronoField; } - private static BytesRef stringToIP(BytesRef value) { - return parseIP(value.utf8ToString()); - } - public static long chronoToLong(long dateTime, BytesRef chronoField, ZoneId zone) { ChronoField chrono = ChronoField.valueOf(chronoField.utf8ToString().toUpperCase(Locale.ROOT)); return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chrono); @@ -185,6 +196,41 @@ public static long chronoToLong(long dateTime, ChronoField chronoField, ZoneId z return Instant.ofEpochMilli(dateTime).atZone(zone).getLong(chronoField); } + /** + * The following conversions are between String and other data types. 
+ */ + public static BytesRef stringToIP(BytesRef field) { + return StringUtils.parseIP(field.utf8ToString()); + } + + public static BytesRef stringToIP(String field) { + return StringUtils.parseIP(field); + } + + public static String ipToString(BytesRef field) { + return DocValueFormat.IP.format(field); + } + + public static BytesRef stringToVersion(BytesRef field) { + return new Version(field.utf8ToString()).toBytesRef(); + } + + public static String versionToString(BytesRef field) { + return new Version(field).toString(); + } + + public static String versionToString(Version field) { + return field.toString(); + } + + public static String spatialToString(BytesRef field) { + return UNSPECIFIED.wkbToWkt(field); + } + + public static BytesRef stringToSpatial(String field) { + return UNSPECIFIED.wktToWkb(field); + } + public static long dateTimeToLong(String dateTime) { return DEFAULT_DATE_TIME_FORMATTER.parseMillis(dateTime); } @@ -201,12 +247,109 @@ public static String dateTimeToString(long dateTime, DateFormatter formatter) { return formatter == null ? 
dateTimeToString(dateTime) : formatter.formatMillis(dateTime); } + public static BytesRef numericBooleanToString(Object field) { + return new BytesRef(String.valueOf(field)); + } + + public static boolean stringToBoolean(String field) { + return Boolean.parseBoolean(field); + } + + public static int stringToInt(String field) { + try { + return Integer.parseInt(field); + } catch (NumberFormatException nfe) { + try { + return safeToInt(stringToDouble(field)); + } catch (Exception e) { + throw new InvalidArgumentException(nfe, "Cannot parse number [{}]", field); + } + } + } + + public static long stringToLong(String field) { + try { + return StringUtils.parseLong(field); + } catch (InvalidArgumentException iae) { + try { + return safeDoubleToLong(stringToDouble(field)); + } catch (Exception e) { + throw new InvalidArgumentException(iae, "Cannot parse number [{}]", field); + } + } + } + + public static double stringToDouble(String field) { + return StringUtils.parseDouble(field); + } + + public static BytesRef unsignedLongToString(long number) { + return new BytesRef(unsignedLongAsNumber(number).toString()); + } + + public static long stringToUnsignedLong(String field) { + return asLongUnsigned(safeToUnsignedLong(field)); + } + + public static Number stringToIntegral(String field) { + return StringUtils.parseIntegral(field); + } + + /** + * The following conversion are between unsignedLong and other numeric data types. 
+ */ + public static double unsignedLongToDouble(long number) { + return NumericUtils.unsignedLongAsNumber(number).doubleValue(); + } + + public static long doubleToUnsignedLong(double number) { + return NumericUtils.asLongUnsigned(safeToUnsignedLong(number)); + } + + public static int unsignedLongToInt(long number) { + Number n = NumericUtils.unsignedLongAsNumber(number); + int i = n.intValue(); + if (i != n.longValue()) { + throw new InvalidArgumentException("[{}] out of [integer] range", n); + } + return i; + } + + public static long intToUnsignedLong(int number) { + return longToUnsignedLong(number, false); + } + + public static long unsignedLongToLong(long number) { + return DataTypeConverter.safeToLong(unsignedLongAsNumber(number)); + } + + public static long longToUnsignedLong(long number, boolean allowNegative) { + return allowNegative == false ? NumericUtils.asLongUnsigned(safeToUnsignedLong(number)) : NumericUtils.asLongUnsigned(number); + } + + public static long bigIntegerToUnsignedLong(BigInteger field) { + BigInteger unsignedLong = asUnsignedLong(field); + return NumericUtils.asLongUnsigned(unsignedLong); + } + + public static BigInteger unsignedLongToBigInteger(long number) { + return NumericUtils.unsignedLongAsBigInteger(number); + } + + public static boolean unsignedLongToBoolean(long number) { + Number n = NumericUtils.unsignedLongAsNumber(number); + return n instanceof BigInteger || n.longValue() != 0; + } + + public static long booleanToUnsignedLong(boolean number) { + return number ? 
ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; + } + public enum EsqlConverter implements Converter { STRING_TO_DATE_PERIOD(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.DATE_PERIOD)), STRING_TO_TIME_DURATION(x -> EsqlDataTypeConverter.parseTemporalAmount(x, EsqlDataTypes.TIME_DURATION)), - STRING_TO_CHRONO_FIELD(EsqlDataTypeConverter::stringToChrono), - STRING_TO_IP(x -> EsqlDataTypeConverter.stringToIP((BytesRef) x)); + STRING_TO_CHRONO_FIELD(EsqlDataTypeConverter::stringToChrono); private static final String NAME = "esql-converter"; private final Function converter; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index 0309bcce85581..22a00bb3684a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -13,6 +13,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -55,7 +57,10 @@ public static Iterable parameters() { ); // random strings that don't look like a double TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.DOUBLE, bytesRef -> null, bytesRef -> { - var exception = expectThrows(NumberFormatException.class, () -> Double.parseDouble(bytesRef.utf8ToString())); + var exception = expectThrows( + 
InvalidArgumentException.class, + () -> EsqlDataTypeConverter.stringToDouble(bytesRef.utf8ToString()) + ); return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", "Line -1:-1: " + exception diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index 4402c6d8529b4..3a6cb86b7a3c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -71,7 +71,7 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) ); // from doubles within Integer's range @@ -228,7 +228,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); // strings of random doubles outside Integer's range, positive @@ -249,7 +251,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index 030c219b75e2f..031ce6193bcc4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -50,7 +50,7 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) ); // from doubles within long's range @@ -179,7 +179,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); // strings of random doubles outside integer's range, positive @@ -200,7 +202,9 @@ public static Iterable parameters() { bytesRef -> null, bytesRef -> List.of( "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: Cannot parse number [" + + ((BytesRef) bytesRef).utf8ToString() + + "]" ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 8cbeca67d0abd..3c1bf69a78716 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -15,13 +15,15 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.NumericUtils; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; + public class Log10Tests extends AbstractFunctionTestCase { public Log10Tests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -54,7 +56,7 @@ public static Iterable parameters() { suppliers, "Log10UnsignedLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, - ul -> Math.log10(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + ul -> Math.log10(ul == null ? 
null : unsignedLongToDouble(bigIntegerToUnsignedLong(ul))), BigInteger.ONE, UNSIGNED_LONG_MAX, List.of() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index 55a479a3d2b2c..29e75bb3f0225 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -22,6 +22,8 @@ import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; + public class SqrtTests extends AbstractFunctionTestCase { public SqrtTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -54,7 +56,7 @@ public static Iterable parameters() { suppliers, "SqrtUnsignedLongEvaluator[val=" + read + "]", DataTypes.DOUBLE, - ul -> Math.sqrt(ul == null ? null : NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), + ul -> Math.sqrt(ul == null ? 
null : unsignedLongToDouble(NumericUtils.asLongUnsigned(ul))), BigInteger.ZERO, UNSIGNED_LONG_MAX, List.of() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java index b1070cb7eb12b..c6c8826c6805a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java @@ -25,6 +25,7 @@ import java.util.function.Supplier; import java.util.stream.DoubleStream; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble; import static org.hamcrest.Matchers.equalTo; public class MvAvgTests extends AbstractMultivalueFunctionTestCase { @@ -53,7 +54,7 @@ public static Iterable parameters() { * So we have to go back to encoded `long` and then convert to double * using the production conversion. That'll round in the same way. 
*/ - (size, data) -> avg.apply(size, data.mapToDouble(v -> NumericUtils.unsignedLongToDouble(NumericUtils.asLongUnsigned(v)))) + (size, data) -> avg.apply(size, data.mapToDouble(v -> unsignedLongToDouble(NumericUtils.asLongUnsigned(v)))) ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } From 8f3c4108f393ce03e1eedf290a2ceda96324612b Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 27 Mar 2024 06:07:38 +0100 Subject: [PATCH 194/214] Make HdfsFixture startup more robust (#106777) --- .../test/fixtures/hdfs/HdfsFixture.java | 34 +++++++++++++++---- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java index 18d406e2a97a0..37d9fc9f536ca 100644 --- a/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java +++ b/test/fixtures/hdfs-fixture/src/main/java/org/elasticsearch/test/fixtures/hdfs/HdfsFixture.java @@ -41,6 +41,7 @@ import java.lang.reflect.Method; import java.net.InetSocketAddress; import java.net.ServerSocket; +import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -185,14 +186,39 @@ public void setupHA() throws IOException { private void startMinHdfs() throws Exception { Path baseDir = temporaryFolder.newFolder("baseDir").toPath(); + int maxAttempts = 3; + for (int attempt = 1; attempt <= maxAttempts; attempt++) { + try { + Path hdfsHome = createHdfsDataFolder(baseDir); + tryStartingHdfs(hdfsHome); + break; + } catch (IOException e) { + // Log the exception + System.out.println("Attempt " + attempt + " failed with error: " + e.getMessage()); + // If the maximum number of attempts is reached, rethrow the exception + FileUtils.deleteDirectory(baseDir.toFile()); + + if (attempt == maxAttempts) { + throw e; + } 
+ } + } + } + + private static Path createHdfsDataFolder(Path baseDir) throws IOException { if (System.getenv("HADOOP_HOME") == null) { Path hadoopHome = baseDir.resolve("hadoop-home"); Files.createDirectories(hadoopHome); System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); } // hdfs-data/, where any data is going - Path hdfsHome = baseDir.resolve("hdfs-data"); - new File(hdfsHome.toFile(), "data").mkdirs(); + Path hdfsData = baseDir.resolve("hdfs-data"); + Files.createDirectories(hdfsData); + return hdfsData; + } + + private void tryStartingHdfs(Path hdfsHome) throws ClassNotFoundException, NoSuchMethodException, IllegalAccessException, + InvocationTargetException, IOException, URISyntaxException { // configure cluster cfg = new Configuration(); cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); @@ -218,11 +244,7 @@ private void startMinHdfs() throws Exception { UserGroupInformation.setConfiguration(cfg); MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); - // if(isSecure()) { builder.nameNodePort(explicitPort); - // } else { - // builder.nameNodePort(explicitPort); - // } if (isHA()) { MiniDFSNNTopology.NNConf nn1 = new MiniDFSNNTopology.NNConf("nn1").setIpcPort(0); MiniDFSNNTopology.NNConf nn2 = new MiniDFSNNTopology.NNConf("nn2").setIpcPort(0); From 09583d99b11554e945c63b902adfc3a61013e9fa Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 27 Mar 2024 08:11:25 +0100 Subject: [PATCH 195/214] Do not run :plugins:repository-hdfs:test on windows (#106765) before refactoring the hdfs test fixture setup this was implicitly achieved by disabling test task for docker agent not available on windows. 
--- plugins/repository-hdfs/build.gradle | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 49fc88a15f7d3..2d32d1827cb13 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -7,6 +7,7 @@ */ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' @@ -106,6 +107,12 @@ tasks.register("yamlRestTestHdfs2", RestIntegTestTask) { classpath = sourceSets.yamlRestTest.runtimeClasspath + configurations.hdfsFixture2 } +tasks.named("test").configure { + onlyIf("Not running on windows") { + OS.current().equals(OS.WINDOWS) == false + } +} + tasks.named("check").configure { dependsOn(tasks.withType(RestIntegTestTask)) } From 84918d936c4eca7e8a70c40205eed52c37a039c4 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 27 Mar 2024 09:33:22 +0200 Subject: [PATCH 196/214] Handle pass-through subfields with deep nesting (#106767) * Handle pass-through subfields with deep nesting * Update docs/changelog/106767.yaml * fix comment * test fixes * yaml tests * refine test --- docs/changelog/106767.yaml | 5 + .../test/data_stream/150_tsdb.yml | 229 +++++++++++++++++- .../index/mapper/RootObjectMapper.java | 61 ++++- .../index/mapper/RootObjectMapperTests.java | 97 ++++++++ 4 files changed, 377 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/106767.yaml diff --git a/docs/changelog/106767.yaml b/docs/changelog/106767.yaml new file mode 100644 index 0000000000000..8541e1b14f275 --- /dev/null +++ b/docs/changelog/106767.yaml @@ -0,0 +1,5 @@ +pr: 106767 +summary: Handle pass-through subfields with deep nesting +area: Mapping +type: bug +issues: [] diff --git 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index b37317208d0dc..683cf675cda8e 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -466,13 +466,13 @@ dynamic templates with nesting: refresh: true body: - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5" }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5", "attributes.a.much.deeper.nested.dim": "AC" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5" }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "resource.attributes.another.dim1": "1", "attributes.dim2": "C", "attributes.another.dim2": "10.5", "attributes.a.much.deeper.nested.dim": "AC" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5" }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5", 
"attributes.a.much.deeper.nested.dim": "BD" }' - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' - - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5" }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "resource.attributes.another.dim1": "2", "attributes.dim2": "D", "attributes.another.dim2": "20.5", "attributes.a.much.deeper.nested.dim": "BD" }' - do: search: @@ -498,7 +498,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -517,7 +517,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -536,7 +536,7 @@ dynamic templates with nesting: field: _tsid - length: { aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } - do: @@ -555,7 +555,220 @@ dynamic templates with nesting: field: _tsid - length: { 
aggregations.filterA.tsids.buckets: 1 } - - match: { aggregations.filterA.tsids.buckets.0.key: "MK0AtuFZowY4QPzoYEAZNK6pWmkqIGKYiosO9O4X2dfFL8p_4TfsFAUUYYv9EqSmEQ" } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + a.much.deeper.nested.dim: AC + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "NNnsRFDTqKogyRBhOBQclM4BkssYqVppKiBimIqLDvTuF9nXxZWMD04YHQKL09tJYL5G4yo" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + +--- +dynamic templates with incremental indexing: + - skip: + version: " - 8.12.99" + reason: "Support for dynamic fields was added in 8.13" + - do: + allowed_warnings: + - "index template [my-dynamic-template] has index patterns [k9s*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-dynamic-template] will take precedence during new index creation" + indices.put_index_template: + name: my-dynamic-template + body: + index_patterns: [k9s*] + data_stream: {} + template: + settings: + index: + number_of_shards: 1 + mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z + + mappings: + properties: + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + resource: + type: object + properties: + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + dynamic_templates: + - counter_metric: + mapping: + type: integer + time_series_metric: counter + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim1": "A", "attributes.dim2": "C" 
}' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim1": "A", "attributes.dim2": "C" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim1": "B", "attributes.dim2": "D" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim1": "B", "attributes.dim2": "D" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:08.138Z","data": "110", "resource.attributes.another.dim1": "1", "attributes.another.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:09.138Z","data": "120", "resource.attributes.another.dim1": "1", "attributes.another.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:10.138Z","data": "130", "resource.attributes.another.dim1": "2", "attributes.another.dim2": "20.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:04:10.238Z","data": "140", "resource.attributes.another.dim1": "2", "attributes.another.dim2": "20.5" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:08.138Z","data": "210", "resource.attributes.another.deeper.dim1": "1", "attributes.another.deeper.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:09.138Z","data": "220", "resource.attributes.another.deeper.dim1": "1", 
"attributes.another.deeper.dim2": "10.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:10.138Z","data": "230", "resource.attributes.another.deeper.dim1": "2", "attributes.another.deeper.dim2": "20.5" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:05:10.238Z","data": "240", "resource.attributes.another.deeper.dim1": "2", "attributes.another.deeper.dim2": "20.5" }' + + - do: + bulk: + index: k9s + refresh: true + body: + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:08.138Z","data": "310", "attributes.a.much.deeper.nested.dim": "AC" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:09.138Z","data": "320", "attributes.a.much.deeper.nested.dim": "AC" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:10.138Z","data": "330", "attributes.a.much.deeper.nested.dim": "BD" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:06:10.238Z","data": "340", "attributes.a.much.deeper.nested.dim": "BD" }' + + - do: + search: + index: k9s + body: + size: 0 + + - match: { hits.total.value: 16 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + dim1: A + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + dim2: C + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + 
another.deeper.dim1: 1 + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + another.deeper.dim2: 10.5 + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + a.much.deeper.nested.dim: AC + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 82ff9ef818579..90d9c879c57e1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -128,19 +128,22 @@ public RootObjectMapper build(MapperBuilderContext context) { } Map getAliasMappers(Map mappers, MapperBuilderContext context) { - Map aliasMappers = new HashMap<>(); + Map newMappers = new HashMap<>(); Map objectIntermediates = new HashMap<>(1); - getAliasMappers(mappers, aliasMappers, objectIntermediates, context, 0); + Map objectIntermediatesFullName = new HashMap<>(1); + getAliasMappers(mappers, mappers, newMappers, objectIntermediates, objectIntermediatesFullName, context, 0); for (var entry : objectIntermediates.entrySet()) { - aliasMappers.put(entry.getKey(), entry.getValue().build(context)); + newMappers.put(entry.getKey(), entry.getValue().build(context)); } - return aliasMappers; + return newMappers; } void getAliasMappers( Map mappers, + Map topLevelMappers, Map aliasMappers, Map objectIntermediates, + Map objectIntermediatesFullName, 
MapperBuilderContext context, int level ) { @@ -179,32 +182,76 @@ void getAliasMappers( ).build(context); aliasMappers.put(aliasMapper.simpleName(), aliasMapper); } else { + conflict = topLevelMappers.get(fieldNameParts[0]); + if (conflict != null) { + if (isConflictingObject(conflict, fieldNameParts)) { + throw new IllegalArgumentException( + "Conflicting objects created during alias generation for pass-through field: [" + + conflict.name() + + "]" + ); + } + } + // Nest the alias within object(s). String realFieldName = fieldNameParts[fieldNameParts.length - 1]; Mapper.Builder fieldBuilder = new FieldAliasMapper.Builder(realFieldName).path( fieldMapper.mappedFieldType.name() ); + ObjectMapper.Builder intermediate = null; for (int i = fieldNameParts.length - 2; i >= 0; --i) { String intermediateObjectName = fieldNameParts[i]; - ObjectMapper.Builder intermediate = objectIntermediates.computeIfAbsent( - intermediateObjectName, + intermediate = objectIntermediatesFullName.computeIfAbsent( + concatStrings(fieldNameParts, i), s -> new ObjectMapper.Builder(intermediateObjectName, ObjectMapper.Defaults.SUBOBJECTS) ); intermediate.add(fieldBuilder); fieldBuilder = intermediate; } + objectIntermediates.putIfAbsent(fieldNameParts[0], intermediate); } } } } } else if (mapper instanceof ObjectMapper objectMapper) { // Call recursively to check child fields. The level guards against long recursive call sequences. 
- getAliasMappers(objectMapper.mappers, aliasMappers, objectIntermediates, context, level + 1); + getAliasMappers( + objectMapper.mappers, + topLevelMappers, + aliasMappers, + objectIntermediates, + objectIntermediatesFullName, + context, + level + 1 + ); } } } } + private static String concatStrings(String[] parts, int last) { + StringBuilder builder = new StringBuilder(); + for (int i = 0; i <= last; i++) { + builder.append('.'); + builder.append(parts[i]); + } + return builder.toString(); + } + + private static boolean isConflictingObject(Mapper mapper, String[] parts) { + for (int i = 0; i < parts.length - 1; i++) { + if (mapper == null) { + return true; + } + if (mapper instanceof ObjectMapper objectMapper) { + mapper = objectMapper.getMapper(parts[i + 1]); + } else { + return true; + } + } + return mapper == null; + } + private final Explicit dynamicDateTimeFormatters; private final Explicit dateDetection; private final Explicit numericDetection; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index 3e90459746a19..7a7f1668b4636 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -391,6 +391,100 @@ public void testPassThroughObjectNested() throws IOException { assertThat(mapperService.mappingLookup().getMapper("attributes.another.dim"), instanceOf(KeywordFieldMapper.class)); } + public void testPassThroughObjectNestedWithDuplicateNames() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("resource").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("dim").field("type", "keyword").endObject(); + 
b.startObject("more.attributes.another.dimA").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimB").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("another.dim").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimC").field("type", "keyword").endObject(); + b.startObject("more.attributes.another.dimD").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + })); + + assertThat(mapperService.mappingLookup().getMapper("dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("resource.attributes.dim"), instanceOf(KeywordFieldMapper.class)); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimA"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("resource.attributes.more.attributes.another.dimA"), + instanceOf(KeywordFieldMapper.class) + ); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimB"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("resource.attributes.more.attributes.another.dimB"), + instanceOf(KeywordFieldMapper.class) + ); + + assertThat(mapperService.mappingLookup().objectMappers().get("another").getMapper("dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("attributes.another.dim"), instanceOf(KeywordFieldMapper.class)); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimC"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("attributes.more.attributes.another.dimC"), + 
instanceOf(KeywordFieldMapper.class) + ); + assertThat( + mapperService.mappingLookup().objectMappers().get("more.attributes.another").getMapper("dimD"), + instanceOf(FieldAliasMapper.class) + ); + assertThat( + mapperService.mappingLookup().getMapper("attributes.more.attributes.another.dimD"), + instanceOf(KeywordFieldMapper.class) + ); + } + + public void testPassThroughObjectNestedWithConflictingNames() throws IOException { + MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> { + b.startObject("resource").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("dim").field("type", "keyword").endObject(); + b.startObject("resource.attributes.another.dim").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + }))); + assertEquals( + "Failed to parse mapping: Conflicting objects created during alias generation for pass-through field: [resource]", + e.getMessage() + ); + } + public void testAliasMappersCreatesAlias() throws Exception { var context = MapperBuilderContext.root(false, false); Map aliases = new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( @@ -445,6 +539,7 @@ public void testAliasMappersExitsInDeepNesting() throws Exception { var context = MapperBuilderContext.root(false, false); Map aliases = new HashMap<>(); var objectIntermediates = new HashMap(1); + var objectIntermediatesFullPath = new HashMap(1); new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( Map.of( "labels", @@ -457,8 +552,10 @@ public void testAliasMappersExitsInDeepNesting() throws Exception { Explicit.EXPLICIT_FALSE ) ), + Map.of(), aliases, objectIntermediates, + objectIntermediatesFullPath, context, 1_000_000 ); From 60028bf07debafbbca59443c93fe306e2b6f078e Mon Sep 17 00:00:00 2001 From: Jan 
Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Wed, 27 Mar 2024 08:38:56 +0100 Subject: [PATCH 197/214] Add background filters of significant terms aggregations to can match query. (#106564) * Add background filters of significant terms aggregations to can match query. * Fix NPE * Unit tests * Update docs/changelog/106564.yaml * Update 106564.yaml * Make aggregation queries in can match phase more generic. * Copy source to preserve other relevant fields. * Replace copy constructor by shallowCopy --- docs/changelog/106564.yaml | 5 + .../action/search/CanMatchNodeRequest.java | 36 +++++- .../aggregations/AggregationBuilder.java | 6 + .../SignificantTermsAggregationBuilder.java | 6 + .../CanMatchPreFilterSearchPhaseTests.java | 122 +++++++++++++++++- 5 files changed, 167 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/106564.yaml diff --git a/docs/changelog/106564.yaml b/docs/changelog/106564.yaml new file mode 100644 index 0000000000000..a4e986c4b7d18 --- /dev/null +++ b/docs/changelog/106564.yaml @@ -0,0 +1,5 @@ +pr: 106564 +summary: Fix the background set of significant terms aggregations in case the data is in different shards than the foreground set +area: Search +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java index 865b7bdf8abfa..bc50a9f8f0c2c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchNodeRequest.java @@ -18,9 +18,12 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.builder.SubSearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; @@ -31,6 +34,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; @@ -129,7 +133,7 @@ public CanMatchNodeRequest( long nowInMillis, @Nullable String clusterAlias ) { - this.source = searchRequest.source(); + this.source = getCanMatchSource(searchRequest); this.indicesOptions = indicesOptions; this.shards = new ArrayList<>(shards); this.searchType = searchRequest.searchType(); @@ -146,6 +150,36 @@ public CanMatchNodeRequest( indices = shards.stream().map(Shard::getOriginalIndices).flatMap(Arrays::stream).distinct().toArray(String[]::new); } + private static void collectAggregationQueries(Collection aggregations, List aggregationQueries) { + for (AggregationBuilder aggregation : aggregations) { + QueryBuilder aggregationQuery = aggregation.getQuery(); + if (aggregationQuery != null) { + aggregationQueries.add(aggregationQuery); + } + collectAggregationQueries(aggregation.getSubAggregations(), aggregationQueries); + } + } + + private SearchSourceBuilder getCanMatchSource(SearchRequest searchRequest) { + // Aggregations may use a different query than the top-level search query. An example is + // the significant terms aggregation, which also collects data over a background that + // typically much larger than the search query. To accommodate for this, we take the union + // of all queries to determine whether a request can match. 
+ List aggregationQueries = new ArrayList<>(); + if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { + collectAggregationQueries(searchRequest.source().aggregations().getAggregatorFactories(), aggregationQueries); + } + if (aggregationQueries.isEmpty()) { + return searchRequest.source(); + } else { + List subSearches = new ArrayList<>(searchRequest.source().subSearches()); + for (QueryBuilder aggregationQuery : aggregationQueries) { + subSearches.add(new SubSearchSourceBuilder(aggregationQuery)); + } + return searchRequest.source().shallowCopy().subSearches(subSearches); + } + } + public CanMatchNodeRequest(StreamInput in) throws IOException { super(in); source = in.readOptionalWriteable(SearchSourceBuilder::new); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index 42eb80cfb1316..ed1c442e1dc28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -97,6 +98,11 @@ public Collection getSubAggregations() { return factoriesBuilder.getAggregatorFactories(); } + /** Return the aggregation's query if it's different from the search query, or null otherwise. 
*/ + public QueryBuilder getQuery() { + return null; + } + /** Return the configured set of pipeline aggregations **/ public Collection getPipelineAggregations() { return factoriesBuilder.getPipelineAggregatorFactories(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java index ce911379d9ddb..e8d5050129e27 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregationBuilder.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -257,6 +258,11 @@ public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgrou return this; } + @Override + public QueryBuilder getQuery() { + return backgroundFilter != null ? 
backgroundFilter : QueryBuilders.matchAllQuery(); + } + /** * Set terms to include and exclude from the aggregation results */ diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 7166664181eb1..70c4d73f578b3 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -30,21 +30,24 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.CoordinatorRewriteContextProvider; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.IndexLongFieldRange; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardLongFieldRange; import org.elasticsearch.search.CanMatchShardResponse; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.sort.MinAndMax; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -497,14 +500,14 
@@ public void testCanMatchFilteringOnCoordinatorThatCanBeSkipped() throws Exceptio regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, (updatedSearchShardIterators, requests) -> { List skippedShards = updatedSearchShardIterators.stream().filter(SearchShardIterator::skip).toList(); - ; List nonSkippedShards = updatedSearchShardIterators.stream() .filter(searchShardIterator -> searchShardIterator.skip() == false) .toList(); - ; int regularIndexShardCount = (int) updatedSearchShardIterators.stream() .filter(s -> regularIndices.contains(s.shardId().getIndex())) @@ -568,6 +571,8 @@ public void testCanMatchFilteringOnCoordinatorParsingFails() throws Exception { regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, this::assertAllShardsAreQueried ); } @@ -624,6 +629,99 @@ public void testCanMatchFilteringOnCoordinatorThatCanNotBeSkipped() throws Excep regularIndices, contextProviderBuilder.build(), queryBuilder, + List.of(), + null, + this::assertAllShardsAreQueried + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withDefaultBackgroundFilter() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2100).to(2200)); + AggregationBuilder aggregation = new 
SignificantTermsAggregationBuilder("significant_terms"); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + null, + // The default background filter matches the whole index, so all shards must be queried. + this::assertAllShardsAreQueried + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withBackgroundFilter() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + Index index4 = new Index("index4", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + contextProviderBuilder.addIndexMinMaxTimestamps(index4, DataStream.TIMESTAMP_FIELD_NAME, 3000, 3999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(3100).to(3200)); + AggregationBuilder aggregation = new SignificantTermsAggregationBuilder("significant_terms").backgroundFilter( + new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(0).to(1999) + ); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + null, + (updatedSearchShardIterators, requests) -> { + // The search query matches index4, the background query matches index1 and index2, + // so index3 is the only one that must be skipped. 
+ for (SearchShardIterator shard : updatedSearchShardIterators) { + if (shard.shardId().getIndex().getName().equals("index3")) { + assertTrue(shard.skip()); + } else { + assertFalse(shard.skip()); + } + } + } + ); + } + + public void testCanMatchFilteringOnCoordinator_withSignificantTermsAggregation_withSuggest() throws Exception { + Index index1 = new Index("index1", UUIDs.base64UUID()); + Index index2 = new Index("index2", UUIDs.base64UUID()); + Index index3 = new Index("index3", UUIDs.base64UUID()); + + StaticCoordinatorRewriteContextProviderBuilder contextProviderBuilder = new StaticCoordinatorRewriteContextProviderBuilder(); + contextProviderBuilder.addIndexMinMaxTimestamps(index1, DataStream.TIMESTAMP_FIELD_NAME, 0, 999); + contextProviderBuilder.addIndexMinMaxTimestamps(index2, DataStream.TIMESTAMP_FIELD_NAME, 1000, 1999); + contextProviderBuilder.addIndexMinMaxTimestamps(index3, DataStream.TIMESTAMP_FIELD_NAME, 2000, 2999); + + QueryBuilder query = new BoolQueryBuilder().filter(new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2100).to(2200)); + AggregationBuilder aggregation = new SignificantTermsAggregationBuilder("significant_terms").backgroundFilter( + new RangeQueryBuilder(DataStream.TIMESTAMP_FIELD_NAME).from(2000).to(2300) + ); + SuggestBuilder suggest = new SuggestBuilder().setGlobalText("test"); + + assignShardsAndExecuteCanMatchPhase( + List.of(), + List.of(index1, index2, index3), + contextProviderBuilder.build(), + query, + List.of(aggregation), + suggest, + // The query and aggregation and match only index3, but suggest should match everything. 
this::assertAllShardsAreQueried ); } @@ -669,6 +767,8 @@ public void testCanMatchFilteringOnCoordinatorThatCanBeSkippedTsdb() throws Exce List.of(), contextProviderBuilder.build(), queryBuilder, + List.of(), + null, (updatedSearchShardIterators, requests) -> { var skippedShards = updatedSearchShardIterators.stream().filter(SearchShardIterator::skip).toList(); var nonSkippedShards = updatedSearchShardIterators.stream() @@ -713,11 +813,13 @@ private void assertAllShardsAreQueried(List updatedSearchSh assertThat(requests.size(), equalTo(shardsWithPrimariesAssigned)); } - private > void assignShardsAndExecuteCanMatchPhase( + private void assignShardsAndExecuteCanMatchPhase( List dataStreams, List regularIndices, CoordinatorRewriteContextProvider contextProvider, - AbstractQueryBuilder query, + QueryBuilder query, + List aggregations, + SuggestBuilder suggest, BiConsumer, List> canMatchResultsConsumer ) throws Exception { Map lookup = new ConcurrentHashMap<>(); @@ -764,14 +866,20 @@ private > void assignShardsAndExecuteCanMatc searchRequest.allowPartialSearchResults(true); final AliasFilter aliasFilter; - if (randomBoolean()) { + if (aggregations.isEmpty() == false || randomBoolean()) { // Apply the query on the request body SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); searchSourceBuilder.query(query); + for (AggregationBuilder aggregation : aggregations) { + searchSourceBuilder.aggregation(aggregation); + } + if (suggest != null) { + searchSourceBuilder.suggest(suggest); + } searchRequest.source(searchSourceBuilder); // Sometimes apply the same query in the alias filter too - aliasFilter = AliasFilter.of(randomBoolean() ? query : null, Strings.EMPTY_ARRAY); + aliasFilter = AliasFilter.of(aggregations.isEmpty() && randomBoolean() ? 
query : null, Strings.EMPTY_ARRAY); } else { // Apply the query as an alias filter aliasFilter = AliasFilter.of(query, Strings.EMPTY_ARRAY); From 24e18f67e42d562dc012a6cdd2ed024341b11e5a Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 27 Mar 2024 10:24:19 +0100 Subject: [PATCH 198/214] Fix duplicate TSDB _source mappers (#106792) The tsdb source mappers now have a defined `Mode` by default so the deduplcation logic in the builder needs some adjustment. Found this in an SDH heap dump that had thousands of duplicate source field mappers in it all of a sudden. Also, we can save a little here (thought it only matters for non-default versions of this now) by using the empty string array constant for include and exclude list. --- .../org/elasticsearch/index/mapper/SourceFieldMapper.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 4f3c4814517e5..15770785e11f9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -144,7 +144,8 @@ protected Parameter[] getParameters() { } private boolean isDefault() { - if (mode.get() != null) { + Mode m = mode.get(); + if (m != null && (indexMode == IndexMode.TIME_SERIES && m == Mode.SYNTHETIC) == false) { return false; } if (enabled.get().value() == false) { @@ -169,8 +170,8 @@ public SourceFieldMapper build() { SourceFieldMapper sourceFieldMapper = new SourceFieldMapper( mode.get(), enabled.get(), - includes.getValue().toArray(String[]::new), - excludes.getValue().toArray(String[]::new), + includes.getValue().toArray(Strings.EMPTY_ARRAY), + excludes.getValue().toArray(Strings.EMPTY_ARRAY), indexMode ); if (indexMode != null) { From 8022943c7600bec01ac2ff0b4befd79157cb7813 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 27 Mar 
2024 11:01:48 +0100 Subject: [PATCH 199/214] Move SearchHit parsing to test codebase (#106787) Same as the other moves of parser code, this is only used in tests, no need to have all of this code loaded in production. --- .../index/rankeval/RatedSearchHitTests.java | 7 +- .../org/elasticsearch/search/SearchHit.java | 294 +------------- .../org/elasticsearch/search/SearchHits.java | 48 --- .../elasticsearch/search/suggest/Suggest.java | 63 +-- .../completion/CompletionSuggestion.java | 92 +---- .../suggest/phrase/PhraseSuggestion.java | 53 +-- .../search/suggest/term/TermSuggestion.java | 49 +-- .../spi/NamedXContentProviderTests.java | 7 +- .../search/NestedIdentityTests.java | 2 +- .../elasticsearch/search/SearchHitTests.java | 18 +- .../elasticsearch/search/SearchHitsTests.java | 2 +- .../CompletionSuggestionOptionTests.java | 68 +++- .../search/suggest/SuggestTests.java | 75 +++- .../search/suggest/SuggestionEntryTests.java | 52 ++- .../search/suggest/SuggestionOptionTests.java | 30 +- .../search/suggest/SuggestionTests.java | 10 +- .../suggest/TermSuggestionOptionTests.java | 25 +- .../search/SearchResponseUtils.java | 376 +++++++++++++++++- .../registry/ModelRegistryImplTests.java | 13 +- .../DataFrameAnalyticsTaskTests.java | 7 +- .../inference/InferenceRunnerTests.java | 3 +- .../persistence/JobResultsPersisterTests.java | 13 +- .../process/IndexingStateProcessorTests.java | 3 +- 23 files changed, 673 insertions(+), 637 deletions(-) diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java index fc56b0066faa9..415d270ef4fdb 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -34,7 +35,11 @@ public class RatedSearchHitTests extends ESTestCase { ); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> SearchHit.fromXContent(p), new ParseField("hit")); + PARSER.declareObject( + ConstructingObjectParser.constructorArg(), + (p, c) -> SearchResponseUtils.parseSearchHit(p), + new ParseField("hit") + ); PARSER.declareField( ConstructingObjectParser.constructorArg(), (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? OptionalInt.empty() : OptionalInt.of(p.intValue()), diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 60ced289929a0..62a62fb93a4a0 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -33,22 +32,15 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.fetch.subphase.LookupField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.Source; import 
org.elasticsearch.transport.LeakTracker; import org.elasticsearch.transport.RemoteClusterAware; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParser.Token; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -56,7 +48,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -66,10 +57,6 @@ import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * A single search hit. 
@@ -80,10 +67,10 @@ public final class SearchHit implements Writeable, ToXContentObject, RefCounted private final transient int docId; - private static final float DEFAULT_SCORE = Float.NaN; + static final float DEFAULT_SCORE = Float.NaN; private float score; - private static final int NO_RANK = -1; + static final int NO_RANK = -1; private int rank; private final Text id; @@ -935,258 +922,6 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t return builder; } - // All fields on the root level of the parsed SearhHit are interpreted as metadata fields - // public because we use it in a completion suggestion option - @SuppressWarnings("unchecked") - public static final ObjectParser.UnknownFieldConsumer> unknownMetaFieldConsumer = (map, fieldName, fieldValue) -> { - Map fieldMap = (Map) map.computeIfAbsent( - METADATA_FIELDS, - v -> new HashMap() - ); - if (fieldName.equals(IgnoredFieldMapper.NAME)) { - fieldMap.put(fieldName, new DocumentField(fieldName, (List) fieldValue)); - } else { - fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue))); - } - }; - - /** - * This parser outputs a temporary map of the objects needed to create the - * SearchHit instead of directly creating the SearchHit. The reason for this - * is that this way we can reuse the parser when parsing xContent from - * {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines - * the output of - * {@link #toInnerXContent(XContentBuilder, org.elasticsearch.xcontent.ToXContent.Params)} - * of the included search hit. 
The output of the map is used to create the - * actual SearchHit instance via {@link #createFromMap(Map)} - */ - private static final ObjectParser, Void> MAP_PARSER = new ObjectParser<>( - "innerHitParser", - unknownMetaFieldConsumer, - HashMap::new - ); - - static { - declareInnerHitsParseFields(MAP_PARSER); - } - - public static SearchHit fromXContent(XContentParser parser) { - return createFromMap(MAP_PARSER.apply(parser, null)); - } - - public static void declareInnerHitsParseFields(ObjectParser, Void> parser) { - parser.declareString((map, value) -> map.put(Fields._INDEX, value), new ParseField(Fields._INDEX)); - parser.declareString((map, value) -> map.put(Fields._ID, value), new ParseField(Fields._ID)); - parser.declareString((map, value) -> map.put(Fields._NODE, value), new ParseField(Fields._NODE)); - parser.declareField( - (map, value) -> map.put(Fields._SCORE, value), - SearchHit::parseScore, - new ParseField(Fields._SCORE), - ValueType.FLOAT_OR_NULL - ); - parser.declareInt((map, value) -> map.put(Fields._RANK, value), new ParseField(Fields._RANK)); - - parser.declareLong((map, value) -> map.put(Fields._VERSION, value), new ParseField(Fields._VERSION)); - parser.declareLong((map, value) -> map.put(Fields._SEQ_NO, value), new ParseField(Fields._SEQ_NO)); - parser.declareLong((map, value) -> map.put(Fields._PRIMARY_TERM, value), new ParseField(Fields._PRIMARY_TERM)); - parser.declareField( - (map, value) -> map.put(Fields._SHARD, value), - (p, c) -> ShardId.fromString(p.text()), - new ParseField(Fields._SHARD), - ValueType.STRING - ); - parser.declareObject( - (map, value) -> map.put(SourceFieldMapper.NAME, value), - (p, c) -> parseSourceBytes(p), - new ParseField(SourceFieldMapper.NAME) - ); - parser.declareObject( - (map, value) -> map.put(Fields.HIGHLIGHT, value), - (p, c) -> parseHighlightFields(p), - new ParseField(Fields.HIGHLIGHT) - ); - parser.declareObject((map, value) -> { - Map fieldMap = get(Fields.FIELDS, map, new HashMap()); - 
fieldMap.putAll(value); - map.put(DOCUMENT_FIELDS, fieldMap); - }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS)); - parser.declareObject( - (map, value) -> map.put(Fields._EXPLANATION, value), - (p, c) -> parseExplanation(p), - new ParseField(Fields._EXPLANATION) - ); - parser.declareObject( - (map, value) -> map.put(NestedIdentity._NESTED, value), - NestedIdentity::fromXContent, - new ParseField(NestedIdentity._NESTED) - ); - parser.declareObject( - (map, value) -> map.put(Fields.INNER_HITS, value), - (p, c) -> parseInnerHits(p), - new ParseField(Fields.INNER_HITS) - ); - - parser.declareField((p, map, context) -> { - XContentParser.Token token = p.currentToken(); - Map matchedQueries = new LinkedHashMap<>(); - if (token == XContentParser.Token.START_OBJECT) { - String fieldName = null; - while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = p.currentName(); - } else if (token.isValue()) { - matchedQueries.put(fieldName, p.floatValue()); - } - } - } else if (token == XContentParser.Token.START_ARRAY) { - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - matchedQueries.put(p.text(), Float.NaN); - } - } - map.put(Fields.MATCHED_QUERIES, matchedQueries); - }, new ParseField(Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); - - parser.declareField( - (map, list) -> map.put(Fields.SORT, list), - SearchSortValues::fromXContent, - new ParseField(Fields.SORT), - ValueType.OBJECT_ARRAY - ); - } - - public static SearchHit createFromMap(Map values) { - String id = get(Fields._ID, values, null); - String index = get(Fields._INDEX, values, null); - String clusterAlias = null; - if (index != null) { - int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); - if (indexOf > 0) { - clusterAlias = index.substring(0, indexOf); - index = index.substring(indexOf + 1); - } - } - ShardId shardId = get(Fields._SHARD, values, null); - String nodeId = 
get(Fields._NODE, values, null); - final SearchShardTarget shardTarget; - if (shardId != null && nodeId != null) { - assert shardId.getIndexName().equals(index); - shardTarget = new SearchShardTarget(nodeId, shardId, clusterAlias); - index = shardTarget.getIndex(); - clusterAlias = shardTarget.getClusterAlias(); - } else { - shardTarget = null; - } - return new SearchHit( - -1, - get(Fields._SCORE, values, DEFAULT_SCORE), - get(Fields._RANK, values, NO_RANK), - id == null ? null : new Text(id), - get(NestedIdentity._NESTED, values, null), - get(Fields._VERSION, values, -1L), - get(Fields._SEQ_NO, values, SequenceNumbers.UNASSIGNED_SEQ_NO), - get(Fields._PRIMARY_TERM, values, SequenceNumbers.UNASSIGNED_PRIMARY_TERM), - get(SourceFieldMapper.NAME, values, null), - get(Fields.HIGHLIGHT, values, null), - get(Fields.SORT, values, SearchSortValues.EMPTY), - get(Fields.MATCHED_QUERIES, values, null), - get(Fields._EXPLANATION, values, null), - shardTarget, - index, - clusterAlias, - null, - get(Fields.INNER_HITS, values, null), - get(DOCUMENT_FIELDS, values, Collections.emptyMap()), - get(METADATA_FIELDS, values, Collections.emptyMap()), - ALWAYS_REFERENCED // TODO: do we ever want pooling here? 
- ); - } - - @SuppressWarnings("unchecked") - private static T get(String key, Map map, T defaultValue) { - return (T) map.getOrDefault(key, defaultValue); - } - - private static float parseScore(XContentParser parser) throws IOException { - if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER || parser.currentToken() == XContentParser.Token.VALUE_STRING) { - return parser.floatValue(); - } else { - return Float.NaN; - } - } - - private static BytesReference parseSourceBytes(XContentParser parser) throws IOException { - try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { - // the original document gets slightly modified: whitespaces or - // pretty printing are not preserved, - // it all depends on the current builder settings - builder.copyCurrentStructure(parser); - return BytesReference.bytes(builder); - } - } - - private static Map parseFields(XContentParser parser) throws IOException { - Map fields = new HashMap<>(); - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - DocumentField field = DocumentField.fromXContent(parser); - fields.put(field.getName(), field); - } - return fields; - } - - private static Map parseInnerHits(XContentParser parser) throws IOException { - Map innerHits = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String name = parser.currentName(); - ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser); - ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); - innerHits.put(name, SearchHits.fromXContent(parser)); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - } - return innerHits; - } - - private static Map parseHighlightFields(XContentParser parser) throws IOException { - Map highlightFields = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - 
HighlightField highlightField = HighlightField.fromXContent(parser); - highlightFields.put(highlightField.name(), highlightField); - } - return highlightFields; - } - - private static Explanation parseExplanation(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParser.Token token; - Float value = null; - String description = null; - List details = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - if (Fields.VALUE.equals(currentFieldName)) { - value = parser.floatValue(); - } else if (Fields.DESCRIPTION.equals(currentFieldName)) { - description = parser.textOrNull(); - } else if (Fields.DETAILS.equals(currentFieldName)) { - ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - details.add(parseExplanation(parser)); - } - } else { - parser.skipChildren(); - } - } - if (value == null) { - throw new ParsingException(parser.getTokenLocation(), "missing explanation value"); - } - if (description == null) { - throw new ParsingException(parser.getTokenLocation(), "missing explanation description"); - } - return Explanation.match(value, description, details); - } - private static void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { builder.startObject(); builder.field(Fields.VALUE, explanation.getValue()); @@ -1251,9 +986,9 @@ public int hashCode() { */ public static final class NestedIdentity implements Writeable, ToXContentFragment { - private static final String _NESTED = "_nested"; - private static final String FIELD = "field"; - private static final String OFFSET = "offset"; + static final String _NESTED = "_nested"; + static final String FIELD = 
"field"; + static final String OFFSET = "offset"; private final Text field; private final int offset; @@ -1379,25 +1114,6 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I return builder; } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "nested_identity", - true, - ctorArgs -> new NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (NestedIdentity) ctorArgs[2]) - ); - static { - PARSER.declareString(constructorArg(), new ParseField(FIELD)); - PARSER.declareInt(constructorArg(), new ParseField(OFFSET)); - PARSER.declareObject(optionalConstructorArg(), PARSER, new ParseField(_NESTED)); - } - - static NestedIdentity fromXContent(XContentParser parser, Void context) { - return fromXContent(parser); - } - - public static NestedIdentity fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object obj) { if (this == obj) { diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index d559fc60fa72d..15b83b202fd98 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -27,14 +27,10 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; -import java.util.List; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable { public static final SearchHit[] EMPTY = new SearchHit[0]; @@ -310,50 +306,6 @@ public Iterator toXContentChunked(ToXContent.Params params }), ChunkedToXContentHelper.array(Fields.HITS, Iterators.forArray(hits)), ChunkedToXContentHelper.endObject()); } - public static SearchHits 
fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - } - XContentParser.Token token = parser.currentToken(); - String currentFieldName = null; - List hits = new ArrayList<>(); - TotalHits totalHits = null; - float maxScore = 0f; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (Fields.TOTAL.equals(currentFieldName)) { - // For BWC with nodes pre 7.0 - long value = parser.longValue(); - totalHits = value == -1 ? null : new TotalHits(value, Relation.EQUAL_TO); - } else if (Fields.MAX_SCORE.equals(currentFieldName)) { - maxScore = parser.floatValue(); - } - } else if (token == XContentParser.Token.VALUE_NULL) { - if (Fields.MAX_SCORE.equals(currentFieldName)) { - maxScore = Float.NaN; // NaN gets rendered as null-field - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (Fields.HITS.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - hits.add(SearchHit.fromXContent(parser)); - } - } else { - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (Fields.TOTAL.equals(currentFieldName)) { - totalHits = parseTotalHitsFragment(parser); - } else { - parser.skipChildren(); - } - } - } - return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); - } - @Override public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java index f3371caf4c1a7..3f638bb7f9905 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ 
b/server/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -8,26 +8,20 @@ package org.elasticsearch.search.suggest; import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; @@ -35,12 +29,9 @@ import java.util.HashMap; import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - /** * Top level suggest result, containing the result for each suggestion. 
*/ @@ -125,29 +116,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller - */ - public static Suggest fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - List>> suggestions = new ArrayList<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String currentField = parser.currentName(); - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - Suggestion> suggestion = Suggestion.fromXContent(parser); - if (suggestion != null) { - suggestions.add(suggestion); - } else { - throw new ParsingException( - parser.getTokenLocation(), - String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField) - ); - } - } - return new Suggest(suggestions); - } - public static List>> reduce(Map>> groupedSuggestions) { List>> reduced = new ArrayList<>(groupedSuggestions.size()); for (Map.Entry>> unmergedResults : groupedSuggestions.entrySet()) { @@ -362,33 +330,14 @@ public int hashCode() { return Objects.hash(name, size, entries); } - @SuppressWarnings("unchecked") - public static Suggestion> fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - SetOnce suggestion = new SetOnce<>(); - XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggestion.class, suggestion::set); - return suggestion.get(); - } - - protected static > void parseEntries( - XContentParser parser, - Suggestion suggestion, - CheckedFunction entryParser - ) throws IOException { - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); - while 
((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - suggestion.addTerm(entryParser.apply(parser)); - } - } - /** * Represents a part from the suggest text with suggested options. */ public abstract static class Entry implements Iterable, Writeable, ToXContentFragment { - private static final String TEXT = "text"; - private static final String OFFSET = "offset"; - private static final String LENGTH = "length"; + static final String TEXT = "text"; + static final String OFFSET = "offset"; + static final String LENGTH = "length"; protected static final String OPTIONS = "options"; protected Text text; @@ -561,12 +510,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - protected static void declareCommonFields(ObjectParser, Void> parser) { - parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(TEXT)); - parser.declareInt((entry, offset) -> entry.offset = offset, new ParseField(OFFSET)); - parser.declareInt((entry, length) -> entry.length = length, new ParseField(LENGTH)); - } - /** * Contains the suggested text with its document frequency and score. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 7210c35d961ac..234ccda755f7b 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -18,24 +18,17 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.search.SearchHit.unknownMetaFieldConsumer; import static org.elasticsearch.search.suggest.Suggest.COMPARATOR; /** @@ -114,12 +107,6 @@ public int hashCode() { return Objects.hash(super.hashCode(), skipDuplicates); } - public static CompletionSuggestion fromXContent(XContentParser parser, String name) throws IOException { - CompletionSuggestion suggestion = new CompletionSuggestion(name, -1, false); - parseEntries(parser, suggestion, CompletionSuggestion.Entry::fromXContent); - return suggestion; - } - private static final class OptionPriorityQueue extends PriorityQueue { OptionPriorityQueue(int maxSize) { super(maxSize); @@ -230,7 +217,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - private Entry() {} + public Entry() {} public 
Entry(StreamInput in) throws IOException { super(in); @@ -241,20 +228,6 @@ protected Option newOption(StreamInput in) throws IOException { return new Option(in); } - private static final ObjectParser PARSER = new ObjectParser<>("CompletionSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. - * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - public static class Option extends Suggest.Suggestion.Entry.Option { private final Map> contexts; private final ScoreDoc doc; @@ -336,69 +309,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private static final ObjectParser, Void> PARSER = new ObjectParser<>( - "CompletionOptionParser", - unknownMetaFieldConsumer, - HashMap::new - ); - - static { - SearchHit.declareInnerHitsParseFields(PARSER); - PARSER.declareString( - (map, value) -> map.put(Suggestion.Entry.Option.TEXT.getPreferredName(), value), - Suggestion.Entry.Option.TEXT - ); - PARSER.declareFloat( - (map, value) -> map.put(Suggestion.Entry.Option.SCORE.getPreferredName(), value), - Suggestion.Entry.Option.SCORE - ); - PARSER.declareObject( - (map, value) -> map.put(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName(), value), - (p, c) -> parseContexts(p), - CompletionSuggestion.Entry.Option.CONTEXTS - ); - } - - private static Map> parseContexts(XContentParser parser) throws IOException { - Map> contexts = new HashMap<>(); - while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); - String key 
= parser.currentName(); - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - Set values = new HashSet<>(); - while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); - values.add(parser.text()); - } - contexts.put(key, values); - } - return contexts; - } - - public static Option fromXContent(XContentParser parser) { - Map values = PARSER.apply(parser, null); - - Text text = new Text((String) values.get(Suggestion.Entry.Option.TEXT.getPreferredName())); - Float score = (Float) values.get(Suggestion.Entry.Option.SCORE.getPreferredName()); - @SuppressWarnings("unchecked") - Map> contexts = (Map>) values.get( - CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName() - ); - if (contexts == null) { - contexts = Collections.emptyMap(); - } - - SearchHit hit = null; - // the option either prints SCORE or inlines the search hit - if (score == null) { - hit = SearchHit.createFromMap(values); - score = hit.getScore(); - } - CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(-1, text, score, contexts); - option.setHit(hit); - return option; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java index 1c881a9887583..e92f6bd8d52cd 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestion.java @@ -13,17 +13,10 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import 
org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - /** * Suggestion entry returned from the {@link PhraseSuggester}. */ @@ -47,12 +40,6 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } - public static PhraseSuggestion fromXContent(XContentParser parser, String name) throws IOException { - PhraseSuggestion suggestion = new PhraseSuggestion(name, -1); - parseEntries(parser, suggestion, PhraseSuggestion.Entry::fromXContent); - return suggestion; - } - public static class Entry extends Suggestion.Entry { protected double cutoffScore = Double.MIN_VALUE; @@ -66,7 +53,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - Entry() {} + public Entry() {} public Entry(StreamInput in) throws IOException { super(in); @@ -94,20 +81,6 @@ public void addOption(Option option) { } } - private static final ObjectParser PARSER = new ObjectParser<>("PhraseSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
- * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override protected Option newOption(StreamInput in) throws IOException { return new Option(in); @@ -142,30 +115,6 @@ public Option(Text text, Text highlighted, float score) { public Option(StreamInput in) throws IOException { super(in); } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "PhraseOptionParser", - true, - args -> { - Text text = new Text((String) args[0]); - float score = (Float) args[1]; - String highlighted = (String) args[2]; - Text highlightedText = highlighted == null ? null : new Text(highlighted); - Boolean collateMatch = (Boolean) args[3]; - return new Option(text, highlightedText, score, collateMatch); - } - ); - - static { - PARSER.declareString(constructorArg(), TEXT); - PARSER.declareFloat(constructorArg(), SCORE); - PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); - PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); - } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java index ce55385dfa550..eb4edcf0a5aa4 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestion.java @@ -14,18 +14,13 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; -import org.elasticsearch.xcontent.ConstructingObjectParser; 
-import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.Comparator; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - /** * The suggestion responses corresponding with the suggestions in the request. */ @@ -104,13 +99,6 @@ public String getWriteableName() { return TermSuggestionBuilder.SUGGESTION_NAME; } - public static TermSuggestion fromXContent(XContentParser parser, String name) throws IOException { - // the "size" parameter and the SortBy for TermSuggestion cannot be parsed from the response, use default values - TermSuggestion suggestion = new TermSuggestion(name, -1, SortBy.SCORE); - parseEntries(parser, suggestion, TermSuggestion.Entry::fromXContent); - return suggestion; - } - @Override protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); @@ -135,7 +123,7 @@ public Entry(Text text, int offset, int length) { super(text, offset, length); } - private Entry() {} + public Entry() {} public Entry(StreamInput in) throws IOException { super(in); @@ -146,20 +134,6 @@ protected Option newOption(StreamInput in) throws IOException { return new Option(in); } - private static final ObjectParser PARSER = new ObjectParser<>("TermSuggestionEntryParser", true, Entry::new); - static { - declareCommonFields(PARSER); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
- * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - /** * Contains the suggested text with its document frequency and score. */ @@ -204,27 +178,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(FREQ.getPreferredName(), freq); return builder; } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "TermSuggestionOptionParser", - true, - args -> { - Text text = new Text((String) args[0]); - int freq = (Integer) args[1]; - float score = (Float) args[2]; - return new Option(text, freq, score); - } - ); - - static { - PARSER.declareString(constructorArg(), Suggestion.Entry.Option.TEXT); - PARSER.declareInt(constructorArg(), FREQ); - PARSER.declareFloat(constructorArg(), Suggestion.Entry.Option.SCORE); - } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } } } diff --git a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java index e13c0c135c5e9..1970662782976 100644 --- a/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/spi/NamedXContentProviderTests.java @@ -10,8 +10,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; -import org.elasticsearch.search.suggest.term.TermSuggestion; +import org.elasticsearch.search.suggest.SuggestTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import 
org.elasticsearch.xcontent.ParseField; @@ -61,12 +60,12 @@ public List getNamedXContentParsers() { new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("phrase_aggregation"), - (parser, context) -> PhraseSuggestion.fromXContent(parser, (String) context) + (parser, context) -> SuggestTests.parsePhraseSuggestion(parser, (String) context) ), new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("test_suggestion"), - (parser, context) -> TermSuggestion.fromXContent(parser, (String) context) + (parser, context) -> SuggestTests.parseTermSuggestion(parser, (String) context) ) ); } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index 8bca9481529d9..04f3b998f8375 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -48,7 +48,7 @@ public void testFromXContent() throws IOException { } builder = nestedIdentity.innerToXContent(builder, ToXContent.EMPTY_PARAMS); try (XContentParser parser = createParser(builder)) { - NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); + NestedIdentity parsedNestedIdentity = SearchResponseUtils.parseNestedIdentity(parser); assertEquals(nestedIdentity, parsedNestedIdentity); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 40bdc3da37242..0eefa171f7c08 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -166,7 +166,7 @@ public void testFromXContent() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = 
SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -201,7 +201,7 @@ public void testFromXContentLenientParsing() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -219,7 +219,7 @@ public void testFromXContentWithoutTypeAndId() throws IOException { SearchHit parsed; try (XContentParser parser = createParser(JsonXContent.jsonXContent, hit)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -341,7 +341,7 @@ public void testWeirdScriptFields() throws Exception { "result": [null] } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, fields.size()); DocumentField result = fields.get("result"); @@ -360,7 +360,7 @@ public void testWeirdScriptFields() throws Exception { } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, fields.size()); DocumentField result = fields.get("result"); @@ -384,7 +384,7 @@ public void testWeirdScriptFields() throws Exception { } }"""); - SearchHit searchHit = SearchHit.fromXContent(parser); + SearchHit searchHit = SearchResponseUtils.parseSearchHit(parser); Map fields = searchHit.getFields(); assertEquals(1, 
fields.size()); DocumentField result = fields.get("result"); @@ -410,7 +410,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -431,7 +431,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -448,7 +448,7 @@ public void testToXContentEmptyFields() throws IOException { final SearchHit parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); + parsed = SearchResponseUtils.parseSearchHit(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 4ca3c5b8dd46e..0d75358768dab 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -225,7 +225,7 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException { assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); assertEquals(SearchHits.Fields.HITS, parser.currentName()); 
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - SearchHits searchHits = SearchHits.fromXContent(parser); + SearchHits searchHits = SearchResponseUtils.parseSearchHits(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); try { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 42fe65c8d14ef..0c6721e1f62e5 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -12,9 +12,11 @@ import org.elasticsearch.common.text.Text; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitTests; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -28,11 +30,75 @@ import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class CompletionSuggestionOptionTests extends ESTestCase { + private static final ObjectParser, Void> PARSER = new ObjectParser<>( + "CompletionOptionParser", + SearchResponseUtils.unknownMetaFieldConsumer, + HashMap::new 
+ ); + + static { + SearchResponseUtils.declareInnerHitsParseFields(PARSER); + PARSER.declareString( + (map, value) -> map.put(Suggest.Suggestion.Entry.Option.TEXT.getPreferredName(), value), + Suggest.Suggestion.Entry.Option.TEXT + ); + PARSER.declareFloat( + (map, value) -> map.put(Suggest.Suggestion.Entry.Option.SCORE.getPreferredName(), value), + Suggest.Suggestion.Entry.Option.SCORE + ); + PARSER.declareObject( + (map, value) -> map.put(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName(), value), + (p, c) -> parseContexts(p), + CompletionSuggestion.Entry.Option.CONTEXTS + ); + } + + private static Map> parseContexts(XContentParser parser) throws IOException { + Map> contexts = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String key = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + Set values = new HashSet<>(); + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + ensureExpectedToken(XContentParser.Token.VALUE_STRING, parser.currentToken(), parser); + values.add(parser.text()); + } + contexts.put(key, values); + } + return contexts; + } + + public static Option parseOption(XContentParser parser) { + Map values = PARSER.apply(parser, null); + + Text text = new Text((String) values.get(Suggest.Suggestion.Entry.Option.TEXT.getPreferredName())); + Float score = (Float) values.get(Suggest.Suggestion.Entry.Option.SCORE.getPreferredName()); + @SuppressWarnings("unchecked") + Map> contexts = (Map>) values.get( + CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName() + ); + if (contexts == null) { + contexts = Collections.emptyMap(); + } + + SearchHit hit = null; + // the option either prints SCORE or inlines the search hit + if (score == null) { + hit = SearchResponseUtils.searchHitFromMap(values); + score = hit.getScore(); + } + 
CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(-1, text, score, contexts); + option.setHit(hit); + return option; + } + public static Option createTestItem() { Text text = new Text(randomAlphaOfLengthBetween(5, 15)); int docId = randomInt(); @@ -91,7 +157,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { } Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { - parsed = Option.fromXContent(parser); + parsed = parseOption(parser); assertNull(parser.nextToken()); } assertEquals(option.getText(), parsed.getText()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index d209f15a641f5..8c850a5be42ac 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -17,8 +17,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -28,6 +30,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,26 +63,76 @@ public class SuggestTests extends ESTestCase { 
new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("term"), - (parser, context) -> TermSuggestion.fromXContent(parser, (String) context) + (parser, context) -> parseTermSuggestion(parser, (String) context) ) ); namedXContents.add( new NamedXContentRegistry.Entry( Suggest.Suggestion.class, new ParseField("phrase"), - (parser, context) -> PhraseSuggestion.fromXContent(parser, (String) context) - ) - ); - namedXContents.add( - new NamedXContentRegistry.Entry( - Suggest.Suggestion.class, - new ParseField("completion"), - (parser, context) -> CompletionSuggestion.fromXContent(parser, (String) context) + (parser, context) -> parsePhraseSuggestion(parser, (String) context) ) ); + namedXContents.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField("completion"), (parser, context) -> { + CompletionSuggestion suggestion = new CompletionSuggestion((String) context, -1, false); + parseEntries(parser, suggestion, SuggestionEntryTests::parseCompletionSuggestionEntry); + return suggestion; + })); xContentRegistry = new NamedXContentRegistry(namedXContents); } + public static PhraseSuggestion parsePhraseSuggestion(XContentParser parser, String name) throws IOException { + PhraseSuggestion suggestion = new PhraseSuggestion(name, -1); + parseEntries(parser, suggestion, SuggestionEntryTests::parsePhraseSuggestionEntry); + return suggestion; + } + + private static > void parseEntries( + XContentParser parser, + Suggestion suggestion, + CheckedFunction entryParser + ) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { + suggestion.addTerm(entryParser.apply(parser)); + } + } + + static void declareCommonEntryParserFields(ObjectParser, Void> parser) { + parser.declareString((entry, text) -> entry.text = new Text(text), new ParseField(Suggestion.Entry.TEXT)); + parser.declareInt((entry, offset) -> entry.offset = offset, 
new ParseField(Suggestion.Entry.OFFSET)); + parser.declareInt((entry, length) -> entry.length = length, new ParseField(Suggestion.Entry.LENGTH)); + } + + public static TermSuggestion parseTermSuggestion(XContentParser parser, String name) throws IOException { + // the "size" parameter and the SortBy for TermSuggestion cannot be parsed from the response, use default values + TermSuggestion suggestion = new TermSuggestion(name, -1, SortBy.SCORE); + parseEntries(parser, suggestion, SuggestTests::parseTermSuggestionEntry); + return suggestion; + } + + private static final ObjectParser PARSER = new ObjectParser<>( + "TermSuggestionEntryParser", + true, + TermSuggestion.Entry::new + ); + static { + declareCommonEntryParserFields(PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. + * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> TermSuggestionOptionTests.parseEntryOption(p), + new ParseField(Suggest.Suggestion.Entry.OPTIONS) + ); + } + + public static TermSuggestion.Entry parseTermSuggestionEntry(XContentParser parser) { + return PARSER.apply(parser, null); + } + public static List getDefaultNamedXContents() { return namedXContents; } @@ -113,7 +166,7 @@ public void testFromXContent() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureFieldName(parser, parser.nextToken(), Suggest.NAME); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = Suggest.fromXContent(parser); + parsed = SearchResponseUtils.parseSuggest(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); @@ -210,7 +263,7 @@ public void testParsingExceptionOnUnknownSuggestion() throws IOException { 
BytesReference originalBytes = BytesReference.bytes(builder); try (XContentParser parser = createParser(builder.contentType().xContent(), originalBytes)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); - ParsingException ex = expectThrows(ParsingException.class, () -> Suggest.fromXContent(parser)); + ParsingException ex = expectThrows(ParsingException.class, () -> SearchResponseUtils.parseSuggest(parser)); assertEquals("Could not parse suggestion keyed as [unknownSuggestion]", ex.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java index fc1589066a9fc..d437f5fdc5fe6 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -39,9 +41,53 @@ public class SuggestionEntryTests extends ESTestCase { private static final Map>, Function>> ENTRY_PARSERS = new HashMap<>(); static { - ENTRY_PARSERS.put(TermSuggestion.Entry.class, TermSuggestion.Entry::fromXContent); - ENTRY_PARSERS.put(PhraseSuggestion.Entry.class, PhraseSuggestion.Entry::fromXContent); - ENTRY_PARSERS.put(CompletionSuggestion.Entry.class, CompletionSuggestion.Entry::fromXContent); + ENTRY_PARSERS.put(TermSuggestion.Entry.class, SuggestTests::parseTermSuggestionEntry); + ENTRY_PARSERS.put(PhraseSuggestion.Entry.class, SuggestionEntryTests::parsePhraseSuggestionEntry); + 
ENTRY_PARSERS.put(CompletionSuggestion.Entry.class, SuggestionEntryTests::parseCompletionSuggestionEntry); + } + + private static final ObjectParser PHRASE_SUGGESTION_ENTRY_PARSER = new ObjectParser<>( + "PhraseSuggestionEntryParser", + true, + PhraseSuggestion.Entry::new + ); + static { + SuggestTests.declareCommonEntryParserFields(PHRASE_SUGGESTION_ENTRY_PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. + * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + PHRASE_SUGGESTION_ENTRY_PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> SuggestionOptionTests.parsePhraseSuggestionOption(p), + new ParseField(Entry.OPTIONS) + ); + } + + public static PhraseSuggestion.Entry parsePhraseSuggestionEntry(XContentParser parser) { + return PHRASE_SUGGESTION_ENTRY_PARSER.apply(parser, null); + } + + private static final ObjectParser COMPLETION_SUGGESTION_ENTRY_PARSER = new ObjectParser<>( + "CompletionSuggestionEntryParser", + true, + CompletionSuggestion.Entry::new + ); + static { + SuggestTests.declareCommonEntryParserFields(COMPLETION_SUGGESTION_ENTRY_PARSER); + /* + * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. 
+ * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 + */ + COMPLETION_SUGGESTION_ENTRY_PARSER.declareObjectArray( + (e, o) -> e.addOptions(o), + (p, c) -> CompletionSuggestionOptionTests.parseOption(p), + new ParseField(Entry.OPTIONS) + ); + } + + public static CompletionSuggestion.Entry parseCompletionSuggestionEntry(XContentParser parser) { + return COMPLETION_SUGGESTION_ENTRY_PARSER.apply(parser, null); } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java index 0261ab623ee8f..d818f05e75e12 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionOptionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; import org.elasticsearch.search.suggest.phrase.PhraseSuggestion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -21,11 +22,38 @@ import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.COLLATE_MATCH; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.HIGHLIGHTED; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.SCORE; +import static org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option.TEXT; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static 
org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SuggestionOptionTests extends ESTestCase { + private static final ConstructingObjectParser PHRASE_OPTION_PARSER = + new ConstructingObjectParser<>("PhraseOptionParser", true, args -> { + Text text = new Text((String) args[0]); + float score = (Float) args[1]; + String highlighted = (String) args[2]; + Text highlightedText = highlighted == null ? null : new Text(highlighted); + Boolean collateMatch = (Boolean) args[3]; + return new PhraseSuggestion.Entry.Option(text, highlightedText, score, collateMatch); + }); + + static { + PHRASE_OPTION_PARSER.declareString(constructorArg(), TEXT); + PHRASE_OPTION_PARSER.declareFloat(constructorArg(), SCORE); + PHRASE_OPTION_PARSER.declareString(optionalConstructorArg(), HIGHLIGHTED); + PHRASE_OPTION_PARSER.declareBoolean(optionalConstructorArg(), COLLATE_MATCH); + } + + public static PhraseSuggestion.Entry.Option parsePhraseSuggestionOption(XContentParser parser) { + return PHRASE_OPTION_PARSER.apply(parser, null); + } + public static Option createTestItem() { Text text = new Text(randomAlphaOfLengthBetween(5, 15)); float score = randomFloat(); @@ -56,7 +84,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = PhraseSuggestion.Entry.Option.fromXContent(parser); + parsed = parsePhraseSuggestionOption(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java index 0e92a0ee37682..f9df18fd0944a 100644 --- 
a/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -130,7 +131,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - parsed = Suggestion.fromXContent(parser); + parsed = SearchResponseUtils.parseSuggestion(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); } @@ -153,7 +154,7 @@ public void testFromXContentWithoutTypeParam() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); - assertNull(Suggestion.fromXContent(parser)); + assertNull(SearchResponseUtils.parseSuggestion(parser)); ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); } } @@ -187,7 +188,10 @@ public void testUnknownSuggestionTypeThrows() throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), 
parser); - NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, () -> Suggestion.fromXContent(parser)); + NamedObjectNotFoundException e = expectThrows( + NamedObjectNotFoundException.class, + () -> SearchResponseUtils.parseSuggestion(parser) + ); assertEquals("[1:31] unknown field [unknownType]", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java index aa5e9fcc3edbf..f13267e60b36f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/TermSuggestionOptionTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; +import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestion.Entry.Option; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -22,6 +24,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class TermSuggestionOptionTests extends ESTestCase { @@ -54,7 +57,7 @@ private void doTestFromXContent(boolean addRandomFields) throws IOException { Option parsed; try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - parsed = Option.fromXContent(parser); + 
parsed = parseEntryOption(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } @@ -71,4 +74,24 @@ public void testToXContent() throws IOException { {"text":"someText","score":1.3,"freq":100}""", xContent.utf8ToString()); } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "TermSuggestionOptionParser", + true, + args -> { + Text text = new Text((String) args[0]); + int freq = (Integer) args[1]; + float score = (Float) args[2]; + return new Option(text, freq, score); + } + ); + + static { + PARSER.declareString(constructorArg(), Suggest.Suggestion.Entry.Option.TEXT); + PARSER.declareInt(constructorArg(), TermSuggestion.Entry.Option.FREQ); + PARSER.declareFloat(constructorArg(), Suggest.Suggestion.Entry.Option.SCORE); + } + + public static Option parseEntryOption(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 8831149fec905..91bee1ee253e9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -7,17 +7,31 @@ */ package org.elasticsearch.search; +import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TotalHits; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Response; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; 
+import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.IgnoredFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.SearchProfileDfsPhaseResult; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; @@ -28,25 +42,46 @@ import org.elasticsearch.search.profile.query.QueryProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.InstantiatingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static 
org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public enum SearchResponseUtils { ; + // All fields on the root level of the parsed SearchHit are interpreted as metadata fields + // public because we use it in a completion suggestion option + @SuppressWarnings("unchecked") + public static final ObjectParser.UnknownFieldConsumer> unknownMetaFieldConsumer = (map, fieldName, fieldValue) -> { + Map fieldMap = (Map) map.computeIfAbsent( + SearchHit.METADATA_FIELDS, + v -> new HashMap() + ); + if (fieldName.equals(IgnoredFieldMapper.NAME)) { + fieldMap.put(fieldName, new DocumentField(fieldName, (List) fieldValue)); + } else { + fieldMap.put(fieldName, new DocumentField(fieldName, Collections.singletonList(fieldValue))); + } + }; + public static TotalHits getTotalHits(SearchRequestBuilder request) { var resp = request.get(); try { @@ -190,11 +225,11 @@ public static SearchResponse parseInnerSearchResponse(XContentParser parser) thr } } else if (token == XContentParser.Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { - hits = SearchHits.fromXContent(parser); + hits = parseSearchHits(parser); } else if (InternalAggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { aggs = InternalAggregations.fromXContent(parser); } else if (Suggest.NAME.equals(currentFieldName)) { - suggest = Suggest.fromXContent(parser); + suggest = parseSuggest(parser); } else if (SearchProfileResults.PROFILE_FIELD.equals(currentFieldName)) { profile = parseSearchProfileResults(parser); } else if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { @@ -523,4 +558,341 @@ public static QueryProfileShardResult parseQueryProfileShardResult(XContentParse } return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); } + + public static SearchHits parseSearchHits(XContentParser parser) throws IOException { + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + 
parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + } + XContentParser.Token token = parser.currentToken(); + String currentFieldName = null; + List hits = new ArrayList<>(); + TotalHits totalHits = null; + float maxScore = 0f; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SearchHits.Fields.TOTAL.equals(currentFieldName)) { + // For BWC with nodes pre 7.0 + long value = parser.longValue(); + totalHits = value == -1 ? null : new TotalHits(value, TotalHits.Relation.EQUAL_TO); + } else if (SearchHits.Fields.MAX_SCORE.equals(currentFieldName)) { + maxScore = parser.floatValue(); + } + } else if (token == XContentParser.Token.VALUE_NULL) { + if (SearchHits.Fields.MAX_SCORE.equals(currentFieldName)) { + maxScore = Float.NaN; // NaN gets rendered as null-field + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (SearchHits.Fields.HITS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + hits.add(parseSearchHit(parser)); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (SearchHits.Fields.TOTAL.equals(currentFieldName)) { + totalHits = SearchHits.parseTotalHitsFragment(parser); + } else { + parser.skipChildren(); + } + } + } + return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); + } + + /** + * This parser outputs a temporary map of the objects needed to create the + * SearchHit instead of directly creating the SearchHit. 
The reason for this + * is that this way we can reuse the parser when parsing xContent from + * {@link org.elasticsearch.search.suggest.completion.CompletionSuggestion.Entry.Option} which unfortunately inlines + * the output of + * {@link SearchHit#toInnerXContent(XContentBuilder, org.elasticsearch.xcontent.ToXContent.Params)} + * of the included search hit. The output of the map is used to create the + * actual SearchHit instance via {@link SearchResponseUtils#searchHitFromMap(Map)} + */ + static final ObjectParser, Void> MAP_PARSER = new ObjectParser<>( + "innerHitParser", + unknownMetaFieldConsumer, + HashMap::new + ); + + static { + declareInnerHitsParseFields(MAP_PARSER); + } + + public static SearchHit parseSearchHit(XContentParser parser) { + return searchHitFromMap(MAP_PARSER.apply(parser, null)); + } + + public static void declareInnerHitsParseFields(ObjectParser, Void> parser) { + parser.declareString((map, value) -> map.put(SearchHit.Fields._INDEX, value), new ParseField(SearchHit.Fields._INDEX)); + parser.declareString((map, value) -> map.put(SearchHit.Fields._ID, value), new ParseField(SearchHit.Fields._ID)); + parser.declareString((map, value) -> map.put(SearchHit.Fields._NODE, value), new ParseField(SearchHit.Fields._NODE)); + parser.declareField( + (map, value) -> map.put(SearchHit.Fields._SCORE, value), + SearchResponseUtils::parseScore, + new ParseField(SearchHit.Fields._SCORE), + ObjectParser.ValueType.FLOAT_OR_NULL + ); + parser.declareInt((map, value) -> map.put(SearchHit.Fields._RANK, value), new ParseField(SearchHit.Fields._RANK)); + + parser.declareLong((map, value) -> map.put(SearchHit.Fields._VERSION, value), new ParseField(SearchHit.Fields._VERSION)); + parser.declareLong((map, value) -> map.put(SearchHit.Fields._SEQ_NO, value), new ParseField(SearchHit.Fields._SEQ_NO)); + parser.declareLong((map, value) -> map.put(SearchHit.Fields._PRIMARY_TERM, value), new ParseField(SearchHit.Fields._PRIMARY_TERM)); + parser.declareField( + (map, 
value) -> map.put(SearchHit.Fields._SHARD, value), + (p, c) -> ShardId.fromString(p.text()), + new ParseField(SearchHit.Fields._SHARD), + ObjectParser.ValueType.STRING + ); + parser.declareObject( + (map, value) -> map.put(SourceFieldMapper.NAME, value), + (p, c) -> parseSourceBytes(p), + new ParseField(SourceFieldMapper.NAME) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields.HIGHLIGHT, value), + (p, c) -> parseHighlightFields(p), + new ParseField(SearchHit.Fields.HIGHLIGHT) + ); + parser.declareObject((map, value) -> { + Map fieldMap = get(SearchHit.Fields.FIELDS, map, new HashMap<>()); + fieldMap.putAll(value); + map.put(SearchHit.DOCUMENT_FIELDS, fieldMap); + }, (p, c) -> parseFields(p), new ParseField(SearchHit.Fields.FIELDS)); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields._EXPLANATION, value), + (p, c) -> parseExplanation(p), + new ParseField(SearchHit.Fields._EXPLANATION) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.NestedIdentity._NESTED, value), + (p, ignored) -> parseNestedIdentity(p), + new ParseField(SearchHit.NestedIdentity._NESTED) + ); + parser.declareObject( + (map, value) -> map.put(SearchHit.Fields.INNER_HITS, value), + (p, c) -> parseInnerHits(p), + new ParseField(SearchHit.Fields.INNER_HITS) + ); + + parser.declareField((p, map, context) -> { + XContentParser.Token token = p.currentToken(); + Map matchedQueries = new LinkedHashMap<>(); + if (token == XContentParser.Token.START_OBJECT) { + String fieldName = null; + while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token.isValue()) { + matchedQueries.put(fieldName, p.floatValue()); + } + } + } else if (token == XContentParser.Token.START_ARRAY) { + while (p.nextToken() != XContentParser.Token.END_ARRAY) { + matchedQueries.put(p.text(), Float.NaN); + } + } + map.put(SearchHit.Fields.MATCHED_QUERIES, matchedQueries); + }, new 
ParseField(SearchHit.Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); + + parser.declareField( + (map, list) -> map.put(SearchHit.Fields.SORT, list), + SearchSortValues::fromXContent, + new ParseField(SearchHit.Fields.SORT), + ObjectParser.ValueType.OBJECT_ARRAY + ); + } + + private static float parseScore(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER || parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return parser.floatValue(); + } else { + return Float.NaN; + } + } + + private static BytesReference parseSourceBytes(XContentParser parser) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { + // the original document gets slightly modified: whitespaces or + // pretty printing are not preserved, + // it all depends on the current builder settings + builder.copyCurrentStructure(parser); + return BytesReference.bytes(builder); + } + } + + private static Map parseFields(XContentParser parser) throws IOException { + Map fields = new HashMap<>(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + DocumentField field = DocumentField.fromXContent(parser); + fields.put(field.getName(), field); + } + return fields; + } + + private static Map parseInnerHits(XContentParser parser) throws IOException { + Map innerHits = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String name = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); + innerHits.put(name, parseSearchHits(parser)); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); + } + return innerHits; + } + + private static Map parseHighlightFields(XContentParser parser) throws 
IOException { + Map highlightFields = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + HighlightField highlightField = HighlightField.fromXContent(parser); + highlightFields.put(highlightField.name(), highlightField); + } + return highlightFields; + } + + private static Explanation parseExplanation(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + XContentParser.Token token; + Float value = null; + String description = null; + List details = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + if (SearchHit.Fields.VALUE.equals(currentFieldName)) { + value = parser.floatValue(); + } else if (SearchHit.Fields.DESCRIPTION.equals(currentFieldName)) { + description = parser.textOrNull(); + } else if (SearchHit.Fields.DETAILS.equals(currentFieldName)) { + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + details.add(parseExplanation(parser)); + } + } else { + parser.skipChildren(); + } + } + if (value == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation value"); + } + if (description == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation description"); + } + return Explanation.match(value, description, details); + } + + /** + * this parsing method assumes that the leading "suggest" field name has already been parsed by the caller + */ + public static Suggest parseSuggest(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + List>> suggestions = + new ArrayList<>(); + while ((parser.nextToken()) != 
XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser); + String currentField = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser); + Suggest.Suggestion> suggestion = parseSuggestion( + parser + ); + if (suggestion != null) { + suggestions.add(suggestion); + } else { + throw new ParsingException( + parser.getTokenLocation(), + String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField) + ); + } + } + return new Suggest(suggestions); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static Suggest.Suggestion> parseSuggestion( + XContentParser parser + ) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + SetOnce suggestion = new SetOnce<>(); + XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Suggest.Suggestion.class, suggestion::set); + return suggestion.get(); + } + + private static final ConstructingObjectParser NESTED_IDENTITY_PARSER = new ConstructingObjectParser<>( + "nested_identity", + true, + ctorArgs -> new SearchHit.NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (SearchHit.NestedIdentity) ctorArgs[2]) + ); + static { + NESTED_IDENTITY_PARSER.declareString(constructorArg(), new ParseField(SearchHit.NestedIdentity.FIELD)); + NESTED_IDENTITY_PARSER.declareInt(constructorArg(), new ParseField(SearchHit.NestedIdentity.OFFSET)); + NESTED_IDENTITY_PARSER.declareObject( + optionalConstructorArg(), + NESTED_IDENTITY_PARSER, + new ParseField(SearchHit.NestedIdentity._NESTED) + ); + } + + public static SearchHit.NestedIdentity parseNestedIdentity(XContentParser parser) { + return NESTED_IDENTITY_PARSER.apply(parser, null); + } + + public static SearchHit searchHitFromMap(Map values) { + String id = get(SearchHit.Fields._ID, values, null); + String index = get(SearchHit.Fields._INDEX, values, null); + String 
clusterAlias = null; + if (index != null) { + int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (indexOf > 0) { + clusterAlias = index.substring(0, indexOf); + index = index.substring(indexOf + 1); + } + } + ShardId shardId = get(SearchHit.Fields._SHARD, values, null); + String nodeId = get(SearchHit.Fields._NODE, values, null); + final SearchShardTarget shardTarget; + if (shardId != null && nodeId != null) { + assert shardId.getIndexName().equals(index); + shardTarget = new SearchShardTarget(nodeId, shardId, clusterAlias); + index = shardTarget.getIndex(); + clusterAlias = shardTarget.getClusterAlias(); + } else { + shardTarget = null; + } + return new SearchHit( + -1, + get(SearchHit.Fields._SCORE, values, SearchHit.DEFAULT_SCORE), + get(SearchHit.Fields._RANK, values, SearchHit.NO_RANK), + id == null ? null : new Text(id), + get(SearchHit.NestedIdentity._NESTED, values, null), + get(SearchHit.Fields._VERSION, values, -1L), + get(SearchHit.Fields._SEQ_NO, values, SequenceNumbers.UNASSIGNED_SEQ_NO), + get(SearchHit.Fields._PRIMARY_TERM, values, SequenceNumbers.UNASSIGNED_PRIMARY_TERM), + get(SourceFieldMapper.NAME, values, null), + get(SearchHit.Fields.HIGHLIGHT, values, null), + get(SearchHit.Fields.SORT, values, SearchSortValues.EMPTY), + get(SearchHit.Fields.MATCHED_QUERIES, values, null), + get(SearchHit.Fields._EXPLANATION, values, null), + shardTarget, + index, + clusterAlias, + null, + get(SearchHit.Fields.INNER_HITS, values, null), + get(SearchHit.DOCUMENT_FIELDS, values, Collections.emptyMap()), + get(SearchHit.METADATA_FIELDS, values, Collections.emptyMap()), + RefCounted.ALWAYS_REFERENCED // TODO: do we ever want pooling here? 
+ ); + } + + @SuppressWarnings("unchecked") + private static T get(String key, Map map, T defaultValue) { + return (T) map.getOrDefault(key, defaultValue); + } + } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java index fd6a203450c12..10fd4f09e86ac 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryImplTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -76,7 +77,7 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIndexReceived() { var client = mockClient(); - var unknownIndexHit = SearchHit.createFromMap(Map.of("_index", "unknown_index")); + var unknownIndexHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", "unknown_index")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit })); var registry = new ModelRegistryImpl(client); @@ -93,7 +94,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceEntry() { var client = mockClient(); - var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".secrets-inference")); + var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); 
mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit })); var registry = new ModelRegistryImpl(client); @@ -110,7 +111,7 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceSecretsEntry() { var client = mockClient(); - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); var registry = new ModelRegistryImpl(client); @@ -140,9 +141,9 @@ public void testGetModelWithSecrets() { } """; - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); - var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".secrets-inference")); + var inferenceSecretsHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".secrets-inference")); inferenceSecretsHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(secrets)))); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); @@ -171,7 +172,7 @@ public void testGetModelNoSecrets() { } """; - var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + var inferenceHit = SearchResponseUtils.searchHitFromMap(Map.of("_index", ".inference")); inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index 8d8cded819e23..25c5191afc218 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.persistent.UpdatePersistentTaskStatusAction; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -243,7 +244,11 @@ public void testPersistProgress_ProgressDocumentCreated() throws IOException { } public void testPersistProgress_ProgressDocumentUpdated() throws IOException { - var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistProgress(hits, ".ml-state-dummy"); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java index d265ae17ce6ea..78ee3e1d6e4fa 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunnerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.tasks.TaskId; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -151,7 +152,7 @@ public void testInferTestDocs_GivenCancelWasCalled() { private static Deque buildSearchHits(List> vals) { return vals.stream().map(InferenceRunnerTests::fromMap).map(reference -> { - var pooled = SearchHit.createFromMap(Collections.singletonMap("_source", reference)); + var pooled = SearchResponseUtils.searchHitFromMap(Collections.singletonMap("_source", reference)); try { return pooled.asUnpooled(); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index db81fc2db3348..654ce7bf965bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; @@ -358,7 +359,11 @@ public void testPersistQuantilesSync_QuantilesDocumentCreated() { } public void testPersistQuantilesSync_QuantilesDocumentUpdated() { - var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistQuantilesSync(hits, ".ml-state-dummy"); } finally { @@ -399,7 +404,11 @@ public void testPersistQuantilesAsync_QuantilesDocumentCreated() { } public void testPersistQuantilesAsync_QuantilesDocumentUpdated() { - 
var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + var hits = new SearchHits( + new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, + null, + 0.0f + ); try { testPersistQuantilesAsync(hits, ".ml-state-dummy"); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java index a7ba148584637..64d0e2b835ffb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; @@ -124,7 +125,7 @@ public void testStateRead_StateDocumentCreated() throws IOException { public void testStateRead_StateDocumentUpdated() throws IOException { testStateRead( - SearchHits.unpooled(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), + SearchHits.unpooled(new SearchHit[] { SearchResponseUtils.searchHitFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), ".ml-state-dummy" ); } From 246dd5f2e6c1684b7d3ea7fe3eb295056bfaf385 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Wed, 27 Mar 2024 11:09:13 +0100 Subject: [PATCH 200/214] Fix AffixSetting.exists to include secure settings (#106745) --- docs/changelog/106745.yaml | 5 +++ .../common/settings/Setting.java | 12 +++++++ .../common/settings/SettingTests.java | 32 
+++++++++++++------ 3 files changed, 40 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/106745.yaml diff --git a/docs/changelog/106745.yaml b/docs/changelog/106745.yaml new file mode 100644 index 0000000000000..a6cb035bd267a --- /dev/null +++ b/docs/changelog/106745.yaml @@ -0,0 +1,5 @@ +pr: 106745 +summary: Fix `AffixSetting.exists` to include secure settings +area: Infra/Core +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index aaedf0f8d8874..1b3173395791c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -888,6 +888,18 @@ private Stream matchStream(Settings settings) { return settings.keySet().stream().filter(this::match).map(key::getConcreteString); } + @Override + public boolean exists(Settings settings) { + // concrete settings might be secure, so don't exclude these here + return key.exists(settings.keySet(), Collections.emptySet()); + } + + @Override + public boolean exists(Settings.Builder builder) { + // concrete settings might be secure, so don't exclude these here + return key.exists(builder.keys(), Collections.emptySet()); + } + /** * Get the raw list of dependencies. This method is exposed for testing purposes and {@link #getSettingsDependencies(String)} * should be preferred for most all cases. 
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 13f789a8b5fae..b198bf6dbed86 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -40,6 +40,7 @@ import java.util.stream.Stream; import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -914,19 +915,32 @@ public void testAffixKeySetting() { assertFalse(listAffixSetting.match("foo")); } - public void testAffixKeyExists() { - Setting setting = Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope)); + public void testAffixKeySettingWithSecure() { + Setting.AffixSetting secureSetting = Setting.affixKeySetting( + "foo.", + "secret", + (key) -> SecureSetting.secureString(key, null) + ); - assertFalse(setting.exists(Settings.EMPTY)); - assertTrue(setting.exists(Settings.builder().put("foo.test.enable", "true").build())); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("foo.a.secret", "secret1"); + secureSettings.setString("foo.b.secret", "secret2"); + Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + + assertThat(secureSetting.exists(settings), is(true)); + + Map secrets = secureSetting.getAsMap(settings); + assertThat(secrets.keySet(), contains("a", "b")); + + Setting secureA = secureSetting.getConcreteSetting("foo.a.secret"); + assertThat(secureA.get(settings), is("secret1")); + assertThat(secrets.get("a"), is("secret1")); } - public void testAffixKeyExistsWithSecure() { + public void testAffixKeyExists() { Setting setting = Setting.affixKeySetting("foo.", "enable", 
(key) -> Setting.boolSetting(key, false, Property.NodeScope)); - - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("foo.test.enabled", "true"); - assertFalse(setting.exists(Settings.builder().setSecureSettings(secureSettings).build())); + assertFalse(setting.exists(Settings.EMPTY)); + assertTrue(setting.exists(Settings.builder().put("foo.test.enable", "true").build())); } public void testAffixSettingNamespaces() { From 4365e5e7bdf8622cea4daa400c2b7188b3ba1eb8 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 27 Mar 2024 11:10:25 +0100 Subject: [PATCH 201/214] Add a test that `RecoveryState.FileDetails` can be modified concurrently (#106717) One of the suggestions in #87568 about the cause of the failure of the test was issues with concurrent update of `RecoveryState.Index`. This test shows that we can concurrently call `addRecoveredFromSnapshotBytesToFile` from multiple threads without corrupting the data. It works because all operations on `RecoveryState.Index` are synchronized despite that the underlying data classes `RecoveryFilesDetails` and `FileDetail` are not thread-safe. 
See #87568 --- .../indices/recovery/RecoveryTargetTests.java | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java index aa749c5dffe5f..c4e708320946f 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java @@ -31,7 +31,10 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptySet; @@ -589,4 +592,35 @@ public void testFileHashCodeAndEquals() { } } } + + public void testConcurrentlyAddRecoveredFromSnapshotBytes() { + var index = new RecoveryState.Index(); + int numIndices = randomIntBetween(1, 4); + for (int i = 0; i < numIndices; i++) { + index.addFileDetail("foo_" + i, randomIntBetween(1, 100), false); + } + + var executor = Executors.newFixedThreadPool(randomIntBetween(2, 8)); + try { + int count = randomIntBetween(1000, 10_000); + var latch = new CountDownLatch(count); + var recoveredBytes = new AtomicLong(); + for (int i = 0; i < count; i++) { + String indexName = "foo_" + (i % numIndices); + executor.submit(() -> { + int bytes = randomIntBetween(1, 1000); + // This is safe because the whole addRecoveredFromSnapshotBytesToFile method is synchronized + index.addRecoveredFromSnapshotBytesToFile(indexName, bytes); + // This fails because only getFileDetails is synchronized + // index.getFileDetails(indexName).addRecoveredFromSnapshotBytes(bytes); + recoveredBytes.addAndGet(bytes); + latch.countDown(); + }); + } + safeAwait(latch); + assertEquals(recoveredBytes.get(), 
index.recoveredFromSnapshotBytes()); + } finally { + executor.shutdownNow(); + } + } } From 5f132caaa95b340997e6505d5b9d83b895ec6d70 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Wed, 27 Mar 2024 11:14:23 +0100 Subject: [PATCH 202/214] Fix range queries for float/half_float fields when bounds are out of type's range (#106691) * Fix for range queries * Add yaml rest tests * Update docs/changelog/106691.yaml * Add more tests * Add more assertions --- docs/changelog/106691.yaml | 6 + .../search/510_range_query_out_of_bounds.yml | 290 ++++++++++++++++++ .../index/mapper/NumberFieldMapper.java | 49 ++- .../index/mapper/NumberFieldTypeTests.java | 89 ++++++ 4 files changed, 417 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/106691.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml diff --git a/docs/changelog/106691.yaml b/docs/changelog/106691.yaml new file mode 100644 index 0000000000000..cbae9796e38c7 --- /dev/null +++ b/docs/changelog/106691.yaml @@ -0,0 +1,6 @@ +pr: 106691 +summary: Fix range queries for float/half_float fields when bounds are out of type's + range +area: Search +type: bug +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml new file mode 100644 index 0000000000000..b2f35fe724410 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/510_range_query_out_of_bounds.yml @@ -0,0 +1,290 @@ +setup: + - skip: + version: " - 8.13.99" + reason: fixed in 8.14.0 + - do: + indices.create: + index: range_query_test_index + body: + mappings: + properties: + half_float_field: + type: half_float + float_field: + type: float + keyword_field: + type: keyword + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "range_query_test_index", "_id" : 
"min_boundary_doc" } }' + - '{"half_float_field" : -65504, "float_field" : -3.4028235E38 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "max_boundary_doc" } }' + - '{"half_float_field" : 65504, "float_field" : 3.4028235E38 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "1" } }' + - '{"half_float_field" : -1, "float_field" : -1 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "2" } }' + - '{"half_float_field" : 1, "float_field" : 1 }' + - '{ "index" : { "_index" : "range_query_test_index", "_id" : "3" } }' + - '{"keyword": "I am missing the half_float/float fields and should not be part of the results" }' + +--- +"Test range query for half_float field with out of bounds upper limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 1e+300 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for float field with out of bounds upper limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 1e+300 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for half_float field with out of bounds lower limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gte: -1e+300 + lt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with out of bounds lower limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gte: -1e+300 + lt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { 
hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater or equal than half float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gte: -65504 + lt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater or equal than float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gte: -3.4028235E38 + lt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "min_boundary_doc" } + - match: { hits.hits.1._id: "1" } + +--- +"Test range query for float field with greater than half float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + gt: -65504 + lt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"Test range query for float field with greater than float min value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + gt: -3.4028235E38 + lt: 0 + sort: float_field + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"Test range query for half_float field with lower or equal than half float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 65504 + gt: 0 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "max_boundary_doc" } + +--- +"Test range query for float field with lower or equal than float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 3.4028235E38 + gt: 0 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: 
"max_boundary_doc" } + +--- +"Test range query for half_float field with lower than half float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lt: 65504 + gt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "2" } + +--- +"Test range query for float field with lower than float max value limit": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lt: 3.4028235E38 + gt: 0 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._id: "2" } + +--- +"Test range query for half float field with lt and gt limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lt: 1 + gt: -1 + + - length: { hits.hits: 0 } + +--- +"Test range query for float field with lt and gt limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lt: 1 + gt: -1 + + - length: { hits.hits: 0 } + +--- +"Test range query for half_float field with gte and lte limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + half_float_field: + lte: 1 + gte: -1 + sort: half_float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + +--- +"Test range query for float field with gte and lte limits": + - do: + search: + index: range_query_test_index + body: + query: + range: + float_field: + lte: 1 + gte: -1 + sort: float_field + + - length: { hits.hits: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index c04c3e5afdc70..1f7a3bf2106ae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -275,6 +275,7 @@ public 
enum NumberType { @Override public Float parse(Object value, boolean coerce) { final float result = parseToFloat(value); + validateFiniteValue(result); // Reduce the precision to what we actually index return HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(result)); } @@ -286,9 +287,9 @@ public double reduceToStoredPrecision(double value) { /** * Parse a query parameter or {@code _source} value to a float, - * keeping float precision. Used by queries which need more - * precise control over their rounding behavior that - * {@link #parse(Object, boolean)} provides. + * keeping float precision. Used by queries which do need to validate + * against infinite values, but need more precise control over their + * rounding behavior that {@link #parse(Object, boolean)} provides. */ private static float parseToFloat(Object value) { final float result; @@ -301,7 +302,6 @@ private static float parseToFloat(Object value) { } result = Float.parseFloat(value.toString()); } - validateParsed(result); return result; } @@ -313,13 +313,14 @@ public Number parsePoint(byte[] value) { @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); - validateParsed(parsed); + validateFiniteValue(parsed); return parsed; } @Override public Query termQuery(String field, Object value, boolean isIndexed) { float v = parseToFloat(value); + validateFiniteValue(v); if (isIndexed) { return HalfFloatPoint.newExactQuery(field, v); } else { @@ -332,7 +333,9 @@ public Query termsQuery(String field, Collection values) { float[] v = new float[values.size()]; int pos = 0; for (Object value : values) { - v[pos++] = parseToFloat(value); + float float_value = parseToFloat(value); + validateFiniteValue(float_value); + v[pos++] = float_value; } return HalfFloatPoint.newSetQuery(field, v); } @@ -420,7 +423,7 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( ); } - private static void 
validateParsed(float value) { + private static void validateFiniteValue(float value) { if (Float.isFinite(HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value))) == false) { throw new IllegalArgumentException("[half_float] supports only finite values, but got [" + value + "]"); } @@ -449,6 +452,17 @@ BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSo FLOAT("float", NumericType.FLOAT) { @Override public Float parse(Object value, boolean coerce) { + final float result = parseToFloat(value); + validateFiniteValue(result); + return result; + } + + /** + * Parse a query parameter or {@code _source} value to a float, + * keeping float precision. Used by queries which do need validate + * against infinite values like {@link #parse(Object, boolean)} does. + */ + private static float parseToFloat(Object value) { final float result; if (value instanceof Number) { @@ -459,7 +473,6 @@ public Float parse(Object value, boolean coerce) { } result = Float.parseFloat(value.toString()); } - validateParsed(result); return result; } @@ -476,13 +489,13 @@ public Number parsePoint(byte[] value) { @Override public Float parse(XContentParser parser, boolean coerce) throws IOException { float parsed = parser.floatValue(coerce); - validateParsed(parsed); + validateFiniteValue(parsed); return parsed; } @Override public Query termQuery(String field, Object value, boolean isIndexed) { - float v = parse(value, false); + float v = parseToFloat(value); if (isIndexed) { return FloatPoint.newExactQuery(field, v); } else { @@ -514,16 +527,18 @@ public Query rangeQuery( float l = Float.NEGATIVE_INFINITY; float u = Float.POSITIVE_INFINITY; if (lowerTerm != null) { - l = parse(lowerTerm, false); - if (includeLower == false) { - l = FloatPoint.nextUp(l); + l = parseToFloat(lowerTerm); + if (includeLower) { + l = FloatPoint.nextDown(l); } + l = FloatPoint.nextUp(l); } if (upperTerm != null) { - u = parse(upperTerm, false); - if (includeUpper 
== false) { - u = FloatPoint.nextDown(u); + u = parseToFloat(upperTerm); + if (includeUpper) { + u = FloatPoint.nextUp(u); } + u = FloatPoint.nextDown(u); } Query query; if (isIndexed) { @@ -582,7 +597,7 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( ); } - private static void validateParsed(float value) { + private static void validateFiniteValue(float value) { if (Float.isFinite(value) == false) { throw new IllegalArgumentException("[float] supports only finite values, but got [" + value + "]"); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 40d1f2488749a..7acb89a857772 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -346,6 +346,95 @@ public void testLongRangeQueryWithDecimalParts() { ); } + public void testHalfFloatRangeQueryWithOverflowingBounds() { + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.HALF_FLOAT, randomBoolean()); + final float min_half_float = -65504; + final float max_half_float = 65504; + assertEquals( + ft.rangeQuery(min_half_float, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-1e+300, 10, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(min_half_float, 10, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, max_half_float, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, 1e+300, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, max_half_float, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + 
ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, max_half_float, false, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(min_half_float, 10, true, false, null, null, null, MOCK_CONTEXT) + ); + } + + public void testFloatRangeQueryWithOverflowingBounds() { + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.FLOAT, randomBoolean()); + + assertEquals( + ft.rangeQuery(-Float.MAX_VALUE, 10.0, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-1e+300, 10.0, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-Float.MAX_VALUE, 10.0, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10.0, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, Float.MAX_VALUE, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, 1e+300, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, Float.MAX_VALUE, true, true, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, true, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.POSITIVE_INFINITY, false, false, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(Float.NEGATIVE_INFINITY, 10, false, false, null, null, 
null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(10, 1e+300, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(10, Float.MAX_VALUE, false, true, null, null, null, MOCK_CONTEXT) + ); + + assertEquals( + ft.rangeQuery(-1e+300, 10, false, false, null, null, null, MOCK_CONTEXT), + ft.rangeQuery(-Float.MAX_VALUE, 10, true, false, null, null, null, MOCK_CONTEXT) + ); + } + public void testRangeQuery() { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG); Query expected = new IndexOrDocValuesQuery( From 9776f54928b93da867f3aa367916293847f96e46 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Wed, 27 Mar 2024 10:50:39 +0000 Subject: [PATCH 203/214] Auto sharding uses the sum of shards write loads (#106785) Data stream auto sharding uses the index write load to decide the optimal number of shards. We read this previously from the indexing stats output, using the `total/write_load` value however, this proved to be wrong as that value takes into account the search shard write load (which will always be 0). Even more, the `total/write_load` value averages the write loads for every shard so you can end up with indices that only have one primary and one replica, with the primary shard having a write load of 1.7 and the `total/write_load` reporting to be `0.8`. For data stream auto sharding we're interested in the **total** index write load, defined as the sum of all the shards write loads (yes we can include the replica shard write loads in this sum as they're 0). This PR changes the rollover write load computation to sum all the shard write loads for the data stream write index, and in the `DataStreamAutoShardingService` when looking at the historic write load over the cooldown period to, again, sum the write loads of every shard in the index metadata/stats. 
--- .../datastreams/DataStreamAutoshardingIT.java | 185 +++++++----------- .../rollover/TransportRolloverAction.java | 15 +- .../DataStreamAutoShardingService.java | 21 +- .../DataStreamAutoShardingServiceTests.java | 11 +- 4 files changed, 89 insertions(+), 143 deletions(-) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java index 332622cc98db8..9f2e6feb91659 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamAutoshardingIT.java @@ -129,22 +129,17 @@ public void testRolloverOnAutoShardCondition() throws Exception { for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number // of 5 shards - shards.add(getShardStats(firstGenerationMeta, i, 75, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats( + firstGenerationMeta, + i, + (long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()), + assignedShardNodeId + ) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, 
null)).actionGet()); ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -180,21 +175,16 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), for (int i = 0; i < secondGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal number of // 7 shards - shards.add(getShardStats(secondGenerationMeta, i, 100, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats( + secondGenerationMeta, + i, + (long) Math.ceil(100.0 / secondGenerationMeta.getNumberOfShards()), + assignedShardNodeId + ) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationMeta, shards); RolloverResponse response = indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet(); assertAcked(response); @@ -232,21 +222,11 @@ instance.new NodeResponse(node.getId(), secondGenerationMeta.getNumberOfShards() for (int i = 0; i < thirdGenIndex.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal // number of 7 shards - shards.add(getShardStats(thirdGenIndex, i, 100, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - 
.addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), thirdGenIndex.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add( + getShardStats(thirdGenIndex, i, (long) Math.ceil(100.0 / thirdGenIndex.getNumberOfShards()), assignedShardNodeId) + ); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, thirdGenIndex, shards); RolloverRequest request = new RolloverRequest(dataStreamName, null); request.setConditions(RolloverConditions.newBuilder().addMaxIndexDocsCondition(1_000_000L).build()); @@ -309,22 +289,10 @@ public void testReduceShardsOnRollover() throws IOException { for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an optimal number // of 2 shards - shards.add(getShardStats(firstGenerationMeta, i, 2, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); + shards.add(getShardStats(firstGenerationMeta, i, i < 2 ? 
1 : 0, assignedShardNodeId)); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -356,23 +324,11 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), .index(dataStreamBeforeRollover.getIndices().get(1)); List shards = new ArrayList<>(secondGenerationIndex.getNumberOfShards()); for (int i = 0; i < secondGenerationIndex.getNumberOfShards(); i++) { - // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an optimal - // number of 2 shards - shards.add(getShardStats(secondGenerationIndex, i, 2, assignedShardNodeId)); - } - - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenerationIndex.getNumberOfShards(), shards, List.of()) - ); - }); + // the shard stats will yield a write load of 2.0 which will make the auto sharding service recommend an + // optimal number of 2 shards + shards.add(getShardStats(secondGenerationIndex, i, i < 2 ? 
1 : 0, assignedShardNodeId)); } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenerationIndex, shards); RolloverRequest request = new RolloverRequest(dataStreamName, null); // adding condition that does NOT match @@ -438,6 +394,11 @@ public void testLazyRolloverKeepsPreviousAutoshardingDecision() throws IOExcepti IndexMetadata firstGenerationMeta = clusterStateBeforeRollover.getMetadata().index(firstGenerationIndex); List shards = new ArrayList<>(firstGenerationMeta.getNumberOfShards()); + String assignedShardNodeId = clusterStateBeforeRollover.routingTable() + .index(dataStreamBeforeRollover.getWriteIndex()) + .shard(0) + .primaryShard() + .currentNodeId(); for (int i = 0; i < firstGenerationMeta.getNumberOfShards(); i++) { // the shard stats will yield a write load of 75.0 which will make the auto sharding service recommend an optimal number // of 5 shards @@ -445,29 +406,13 @@ public void testLazyRolloverKeepsPreviousAutoshardingDecision() throws IOExcepti getShardStats( firstGenerationMeta, i, - 75, - clusterStateBeforeRollover.routingTable() - .index(dataStreamBeforeRollover.getWriteIndex()) - .shard(0) - .primaryShard() - .currentNodeId() + (long) Math.ceil(75.0 / firstGenerationMeta.getNumberOfShards()), + assignedShardNodeId ) ); } - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), shards, List.of()) - ); - }); - } - + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, firstGenerationMeta, shards); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); 
ClusterState clusterStateAfterRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); @@ -491,37 +436,22 @@ instance.new NodeResponse(node.getId(), firstGenerationMeta.getNumberOfShards(), ClusterState clusterStateBeforeRollover = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); DataStream dataStreamBeforeRollover = clusterStateBeforeRollover.getMetadata().dataStreams().get(dataStreamName); + String assignedShardNodeId = clusterStateBeforeRollover.routingTable() + .index(dataStreamBeforeRollover.getWriteIndex()) + .shard(0) + .primaryShard() + .currentNodeId(); IndexMetadata secondGenIndex = clusterStateBeforeRollover.metadata().index(dataStreamBeforeRollover.getIndices().get(1)); List shards = new ArrayList<>(secondGenIndex.getNumberOfShards()); for (int i = 0; i < secondGenIndex.getNumberOfShards(); i++) { // the shard stats will yield a write load of 100.0 which will make the auto sharding service recommend an optimal // number of 7 shards shards.add( - getShardStats( - secondGenIndex, - i, - 100, - clusterStateBeforeRollover.routingTable() - .index(dataStreamBeforeRollover.getWriteIndex()) - .shard(i) - .primaryShard() - .currentNodeId() - ) + getShardStats(secondGenIndex, i, (long) Math.ceil(100.0 / secondGenIndex.getNumberOfShards()), assignedShardNodeId) ); } - for (DiscoveryNode node : clusterStateBeforeRollover.nodes().getAllNodes()) { - MockTransportService.getInstance(node.getName()) - .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { - TransportIndicesStatsAction instance = internalCluster().getInstance( - TransportIndicesStatsAction.class, - node.getName() - ); - channel.sendResponse( - instance.new NodeResponse(node.getId(), secondGenIndex.getNumberOfShards(), shards, List.of()) - ); - }); - } + mockStatsForIndex(clusterStateBeforeRollover, assignedShardNodeId, secondGenIndex, shards); RolloverRequest request = new 
RolloverRequest(dataStreamName, null); request.lazy(true); @@ -612,4 +542,33 @@ public Settings additionalSettings() { } } + private static void mockStatsForIndex( + ClusterState clusterState, + String assignedShardNodeId, + IndexMetadata indexMetadata, + List shards + ) { + for (DiscoveryNode node : clusterState.nodes().getAllNodes()) { + // one node returns the stats for all our shards, the other nodes don't return any stats + if (node.getId().equals(assignedShardNodeId)) { + MockTransportService.getInstance(node.getName()) + .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { + TransportIndicesStatsAction instance = internalCluster().getInstance( + TransportIndicesStatsAction.class, + node.getName() + ); + channel.sendResponse(instance.new NodeResponse(node.getId(), indexMetadata.getNumberOfShards(), shards, List.of())); + }); + } else { + MockTransportService.getInstance(node.getName()) + .addRequestHandlingBehavior(IndicesStatsAction.NAME + "[n]", (handler, request, channel, task) -> { + TransportIndicesStatsAction instance = internalCluster().getInstance( + TransportIndicesStatsAction.class, + node.getName() + ); + channel.sendResponse(instance.new NodeResponse(node.getId(), 0, List.of(), List.of())); + }); + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 774bfae53fb94..bd507ee9054f1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -250,11 +250,16 @@ protected void masterOperation( final Optional indexStats = Optional.ofNullable(statsResponse) .map(stats -> stats.getIndex(dataStream.getWriteIndex().getName())); - Double writeLoad = indexStats.map(stats -> 
stats.getTotal().getIndexing()) - .map(indexing -> indexing.getTotal().getWriteLoad()) - .orElse(null); - - rolloverAutoSharding = dataStreamAutoShardingService.calculate(clusterState, dataStream, writeLoad); + Double indexWriteLoad = indexStats.map( + stats -> Arrays.stream(stats.getShards()) + .filter(shardStats -> shardStats.getStats().indexing != null) + // only take primaries into account as in stateful the replicas also index data + .filter(shardStats -> shardStats.getShardRouting().primary()) + .map(shardStats -> shardStats.getStats().indexing.getTotal().getWriteLoad()) + .reduce(0.0, Double::sum) + ).orElse(null); + + rolloverAutoSharding = dataStreamAutoShardingService.calculate(clusterState, dataStream, indexWriteLoad); logger.debug("auto sharding result for data stream [{}] is [{}]", dataStream.getName(), rolloverAutoSharding); // if auto sharding recommends increasing the number of shards we want to trigger a rollover even if there are no diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java index 06aec69bc97da..a045c73cc83a1 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingService.java @@ -29,7 +29,6 @@ import java.util.List; import java.util.Objects; import java.util.OptionalDouble; -import java.util.OptionalLong; import java.util.function.Function; import java.util.function.LongSupplier; @@ -381,27 +380,11 @@ static double getMaxIndexLoadWithinCoolingPeriod( // assume the current write index load is the highest observed and look back to find the actual maximum double maxIndexLoadWithinCoolingPeriod = writeIndexLoad; for (IndexWriteLoad writeLoad : writeLoadsWithinCoolingPeriod) { - // the IndexWriteLoad stores _for 
each shard_ a shard average write load ( calculated using : shard indexing time / shard - // uptime ) and its corresponding shard uptime - // - // to reconstruct the average _index_ write load we recalculate the shard indexing time by multiplying the shard write load - // to its uptime, and then, having the indexing time and uptime for each shard we calculate the average _index_ write load using - // (indexingTime_shard0 + indexingTime_shard1) / (uptime_shard0 + uptime_shard1) - // as {@link org.elasticsearch.index.shard.IndexingStats#add} does - double totalShardIndexingTime = 0; - long totalShardUptime = 0; + double totalIndexLoad = 0; for (int shardId = 0; shardId < writeLoad.numberOfShards(); shardId++) { final OptionalDouble writeLoadForShard = writeLoad.getWriteLoadForShard(shardId); - final OptionalLong uptimeInMillisForShard = writeLoad.getUptimeInMillisForShard(shardId); - if (writeLoadForShard.isPresent()) { - assert uptimeInMillisForShard.isPresent(); - double shardIndexingTime = writeLoadForShard.getAsDouble() * uptimeInMillisForShard.getAsLong(); - long shardUptimeInMillis = uptimeInMillisForShard.getAsLong(); - totalShardIndexingTime += shardIndexingTime; - totalShardUptime += shardUptimeInMillis; - } + totalIndexLoad += writeLoadForShard.orElse(0); } - double totalIndexLoad = totalShardUptime == 0 ? 
0.0 : (totalShardIndexingTime / totalShardUptime); if (totalIndexLoad > maxIndexLoadWithinCoolingPeriod) { maxIndexLoadWithinCoolingPeriod = totalIndexLoad; } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index bc1ec6788eec6..7f50ebca36fc5 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -51,9 +51,7 @@ import static org.elasticsearch.action.datastreams.autosharding.AutoShardingType.INCREASE_SHARDS; import static org.elasticsearch.action.datastreams.autosharding.AutoShardingType.NO_CHANGE_REQUIRED; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThan; public class DataStreamAutoShardingServiceTests extends ESTestCase { @@ -646,10 +644,10 @@ public void testGetMaxIndexLoadWithinCoolingPeriod() { () -> now ); // to cover the entire cooldown period, the last index before the cooling period is taken into account - assertThat(maxIndexLoadWithinCoolingPeriod, is(lastIndexBeforeCoolingPeriodHasLowWriteLoad ? 5.0 : 999.0)); + assertThat(maxIndexLoadWithinCoolingPeriod, is(lastIndexBeforeCoolingPeriodHasLowWriteLoad ? 
15.0 : 999.0)); } - public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { + public void testIndexLoadWithinCoolingPeriodIsSumOfShardsLoads() { final TimeValue coolingPeriod = TimeValue.timeValueDays(3); final Metadata.Builder metadataBuilder = Metadata.builder(); @@ -658,6 +656,8 @@ public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { final String dataStreamName = "logs"; long now = System.currentTimeMillis(); + double expectedIsSumOfShardLoads = 0.5 + 3.0 + 0.3333; + for (int i = 0; i < numberOfBackingIndicesWithinCoolingPeriod; i++) { final long createdAt = now - (coolingPeriod.getMillis() / 2); IndexMetadata indexMetadata; @@ -705,8 +705,7 @@ public void testIndexLoadWithinCoolingPeriodIsShardLoadsAvg() { coolingPeriod, () -> now ); - assertThat(maxIndexLoadWithinCoolingPeriod, is(greaterThan(0.499))); - assertThat(maxIndexLoadWithinCoolingPeriod, is(lessThan(0.5))); + assertThat(maxIndexLoadWithinCoolingPeriod, is(expectedIsSumOfShardLoads)); } public void testAutoShardingResultValidation() { From ee6741212ec3e705fae9c54e832af3e4ba58091f Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 27 Mar 2024 12:03:29 +0100 Subject: [PATCH 204/214] Introduce test-arm PR label (#106801) --- .buildkite/pipelines/pull-request/part-1-arm.yml | 13 +++++++++++++ .buildkite/pipelines/pull-request/part-2-arm.yml | 13 +++++++++++++ .buildkite/pipelines/pull-request/part-3-arm.yml | 13 +++++++++++++ .buildkite/pipelines/pull-request/part-4-arm.yml | 13 +++++++++++++ 4 files changed, 52 insertions(+) create mode 100644 .buildkite/pipelines/pull-request/part-1-arm.yml create mode 100644 .buildkite/pipelines/pull-request/part-2-arm.yml create mode 100644 .buildkite/pipelines/pull-request/part-3-arm.yml create mode 100644 .buildkite/pipelines/pull-request/part-4-arm.yml diff --git a/.buildkite/pipelines/pull-request/part-1-arm.yml b/.buildkite/pipelines/pull-request/part-1-arm.yml new file mode 100644 index 0000000000000..5e94f90c67754 --- /dev/null +++ 
b/.buildkite/pipelines/pull-request/part-1-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-1-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart1 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-2-arm.yml b/.buildkite/pipelines/pull-request/part-2-arm.yml new file mode 100644 index 0000000000000..9a89ddf4a0eac --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-2-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-2-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart2 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-3-arm.yml b/.buildkite/pipelines/pull-request/part-3-arm.yml new file mode 100644 index 0000000000000..003300597af28 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-3-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-3-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart3 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/pull-request/part-4-arm.yml b/.buildkite/pipelines/pull-request/part-4-arm.yml new file mode 100644 index 0000000000000..6462eba287bd6 --- /dev/null +++ b/.buildkite/pipelines/pull-request/part-4-arm.yml @@ -0,0 +1,13 @@ +config: + allow-labels: "test-arm" +steps: + - label: part-4-arm + command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart4 + timeout_in_minutes: 300 + agents: + provider: aws + imagePrefix: 
elasticsearch-ubuntu-2004-aarch64 + instanceType: m6g.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 From 62e3e5fd1b6f8f3b9902dd8ff24da92f2517ae72 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 27 Mar 2024 12:08:10 +0100 Subject: [PATCH 205/214] ES|QL: Improve support for TEXT fields in functions (#106688) --- docs/changelog/106688.yaml | 5 + .../description/date_format.asciidoc | 7 ++ .../functions/layout/date_format.asciidoc | 14 +++ .../functions/parameters/date_format.asciidoc | 7 ++ .../esql/functions/signature/date_format.svg | 1 + .../functions/types/date_extract.asciidoc | 1 + .../esql/functions/types/date_format.asciidoc | 10 ++ .../esql/functions/types/date_parse.asciidoc | 1 + .../src/main/resources/meta.csv-spec | 18 +-- .../src/main/resources/string.csv-spec | 8 ++ .../esql/expression/EsqlTypeResolutions.java | 9 ++ .../function/scalar/date/DateExtract.java | 7 +- .../function/scalar/date/DateFormat.java | 19 ++-- .../function/scalar/date/DateParse.java | 7 +- .../function/scalar/ip/CIDRMatch.java | 11 +- .../function/scalar/string/Split.java | 5 +- .../function/AbstractFunctionTestCase.java | 1 + .../AbstractScalarFunctionTestCase.java | 3 + .../scalar/date/DateExtractTests.java | 12 ++ .../function/scalar/date/DateFormatTests.java | 79 +++++++++++++ .../function/scalar/date/DateParseTests.java | 12 ++ .../function/scalar/ip/CIDRMatchTests.java | 104 ++++++++++++++++++ .../rest-api-spec/test/esql/100_bug_fix.yml | 61 ++++++++++ .../rest-api-spec/test/esql/80_text.yml | 20 ++++ 24 files changed, 386 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/106688.yaml create mode 100644 docs/reference/esql/functions/description/date_format.asciidoc create mode 100644 docs/reference/esql/functions/layout/date_format.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_format.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_format.svg create mode 100644 
docs/reference/esql/functions/types/date_format.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java diff --git a/docs/changelog/106688.yaml b/docs/changelog/106688.yaml new file mode 100644 index 0000000000000..d72227101d610 --- /dev/null +++ b/docs/changelog/106688.yaml @@ -0,0 +1,5 @@ +pr: 106688 +summary: "ES|QL: Improve support for TEXT fields in functions" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/description/date_format.asciidoc b/docs/reference/esql/functions/description/date_format.asciidoc new file mode 100644 index 0000000000000..82cdbecaa49ef --- /dev/null +++ b/docs/reference/esql/functions/description/date_format.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a string representation of a date, in the provided format. + +NOTE: diff --git a/docs/reference/esql/functions/layout/date_format.asciidoc b/docs/reference/esql/functions/layout/date_format.asciidoc new file mode 100644 index 0000000000000..1f9199afc812c --- /dev/null +++ b/docs/reference/esql/functions/layout/date_format.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-date_format]] +=== `DATE_FORMAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_format.svg[Embedded,opts=inline] + +include::../parameters/date_format.asciidoc[] +include::../description/date_format.asciidoc[] +include::../types/date_format.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc new file mode 100644 index 0000000000000..7b000418b961c --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_format.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`dateFormat`:: +A valid date pattern + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/signature/date_format.svg b/docs/reference/esql/functions/signature/date_format.svg new file mode 100644 index 0000000000000..961fcff51d42b --- /dev/null +++ b/docs/reference/esql/functions/signature/date_format.svg @@ -0,0 +1 @@ +DATE_FORMAT(dateFormat,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 08bc0f6b51357..43702ef0671a7 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -6,4 +6,5 @@ |=== datePart | date | result keyword | datetime | long +text | datetime | long |=== diff --git a/docs/reference/esql/functions/types/date_format.asciidoc b/docs/reference/esql/functions/types/date_format.asciidoc new file mode 100644 index 0000000000000..a76f38653b9b8 --- /dev/null +++ b/docs/reference/esql/functions/types/date_format.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +dateFormat | date | result +keyword | datetime | keyword +text | datetime | keyword +|=== diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index 0d9e4b30c7c7b..82ae8253baa26 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -7,4 +7,5 @@ datePattern | dateString | result keyword | keyword | datetime keyword | text | datetime +text | text | datetime |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index cd94ae793516e..7d1617b208f34 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -12,7 +12,7 @@ auto_bucket |"double|date auto_bucket(field:integer|long|double|dat avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. 
If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false @@ -20,10 +20,10 @@ cosh |"double cosh(number:double|integer|long|unsigned_long) count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." 
| true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |long date_extract(datePart:keyword, date:date) |[datePart, date] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |keyword date_format(?dateFormat:keyword, date:date) |[dateFormat, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +date_extract |"long date_extract(datePart:keyword|text, date:date)" |[datePart, date] |["keyword|text", date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |"keyword date_format(?dateFormat:keyword|text, date:date)" |[dateFormat, date] |["keyword|text", date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword|text, dateString:keyword|text)"|[datePattern, dateString]|["keyword|text", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |["keyword", date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false e |double e() | null | null | null |double | "Euler’s number." | null | false | false ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." 
| false | false | false @@ -116,7 +116,7 @@ synopsis:keyword "double avg(number:double|integer|long)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" -boolean cidr_match(ip:ip, blockX...:keyword) +"boolean cidr_match(ip:ip, blockX...:keyword|text)" "boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" "keyword concat(string1:keyword|text, string2...:keyword|text)" "double cos(number:double|integer|long|unsigned_long)" @@ -124,9 +124,9 @@ boolean cidr_match(ip:ip, blockX...:keyword) "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -long date_extract(datePart:keyword, date:date) -keyword date_format(?dateFormat:keyword, date:date) -"date date_parse(?datePattern:keyword, dateString:keyword|text)" +"long date_extract(datePart:keyword|text, date:date)" +"keyword date_format(?dateFormat:keyword|text, date:date)" +"date date_parse(?datePattern:keyword|text, dateString:keyword|text)" "date date_trunc(interval:keyword, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 06fca2682bbb9..d9c9e535c2c45 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1160,3 +1160,11 
@@ required_feature: esql.agg_values null | null // end::values-grouped-result[] ; + + +splitBasedOnField +from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | keep gender, split; + +gender:keyword | split:keyword +M | [foo, bar] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index 0379f1a5d3614..85d5357d7c1ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -27,6 +27,15 @@ public class EsqlTypeResolutions { + public static Expression.TypeResolution isStringAndExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + Expression.TypeResolution resolution = TypeResolutions.isString(e, operationName, paramOrd); + if (resolution.unresolved()) { + return resolution; + } + + return isExact(e, operationName, paramOrd); + } + public static Expression.TypeResolution isExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { if (e instanceof FieldAttribute fa) { if (DataTypes.isString(fa.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 4f31f73963569..544f284791919 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -28,10 +29,10 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateExtract extends EsqlConfigurationFunction { @@ -42,7 +43,7 @@ public DateExtract( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial - @Param(name = "datePart", type = { "keyword" }, description = """ + @Param(name = "datePart", type = { "keyword", "text" }, description = """ Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; @@ -76,7 +77,7 @@ private ChronoField chronoField() { if (chronoField == null) { Expression field = children().get(0); try { - if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { + if (field.foldable() && EsqlDataTypes.isString(field.dataType())) { chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); } } catch (Exception e) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 85e8a0f3aec47..6a6e523f81974 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.session.Configuration; @@ -28,12 +29,12 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import 
static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { @@ -43,7 +44,7 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") public DateFormat( Source source, - @Param(optional = true, name = "dateFormat", type = { "keyword" }, description = "A valid date pattern") Expression format, + @Param(optional = true, name = "dateFormat", type = { "keyword", "text" }, description = "A valid date pattern") Expression format, @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, Configuration configuration ) { @@ -96,23 +97,17 @@ static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_DATE_TIME_FORMATTER, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); } - if (format.dataType() != DataTypes.KEYWORD) { + if (EsqlDataTypes.isString(format.dataType()) == false) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, formatter); } var formatEvaluator = 
toEvaluator.apply(format); - return dvrCtx -> new DateFormatEvaluator( - source(), - fieldEvaluator.get(dvrCtx), - formatEvaluator.get(dvrCtx), - ((EsqlConfiguration) configuration()).locale(), - dvrCtx - ); + return new DateFormatEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, ((EsqlConfiguration) configuration()).locale()); } private static DateFormatter toFormatter(Object format, Locale locale) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 0bb9a5dde1959..b356dbccbeb4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -28,12 +29,12 @@ import java.util.function.Function; import static org.elasticsearch.common.time.DateFormatter.forPattern; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC; public class DateParse extends EsqlScalarFunction implements OptionalArgument { @@ -44,7 +45,7 @@ public class DateParse extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo(returnType = "date", description = "Parses a string into a date value") public DateParse( Source source, - @Param(name = "datePattern", type = { "keyword" }, description = "A valid date pattern", optional = true) Expression first, + @Param(name = "datePattern", type = { "keyword", "text" }, description = "A valid date pattern", optional = true) Expression first, @Param(name = "dateString", type = { "keyword", "text" }, description = "A string representing a date") Expression second ) { super(source, second != null ? List.of(first, second) : List.of(first)); @@ -99,7 +100,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function matches + @Param(name = "blockX", type = { "keyword", "text" }, description = "CIDR block to test the IP against.") List matches ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; @@ -76,11 +76,10 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); - return dvrCtx -> new CIDRMatchEvaluator( + return new CIDRMatchEvaluator.Factory( source(), - ipEvaluatorSupplier.get(dvrCtx), - matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx + ipEvaluatorSupplier, + matches.stream().map(x -> toEvaluator.apply(x)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 4ecc7fa1a96a7..611fc9947d3db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -25,10 +25,9 @@ import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; /** * Splits a string on some delimiter into a multivalued string field. @@ -59,7 +58,7 @@ protected TypeResolution resolveType() { return resolution; } - return isString(right(), sourceText(), SECOND); + return isStringAndExact(right(), sourceText(), SECOND); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 755e5fcf25b9b..78c1c57e07782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -896,6 +896,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List validTypes) { if (withoutNull.equals(List.of(DataTypes.DATETIME))) { return "datetime"; } + if (withoutNull.equals(List.of(DataTypes.IP))) { + return "ip"; + } List negations = Stream.concat(Stream.of(numerics()), Stream.of(EsqlDataTypes.DATE_PERIOD, 
EsqlDataTypes.TIME_DURATION)) .sorted(Comparator.comparing(DataType::name)) .toList(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 3a6a5d8eabae3..1e2c24062b07a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -53,6 +53,18 @@ public static Iterable parameters() { equalTo(2023L) ) ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.TEXT, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + equalTo(2023L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.DATETIME), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java new file mode 100644 index 0000000000000..3fa28c566649e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class DateFormatTests extends AbstractScalarFunctionTestCase { + public DateFormatTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.KEYWORD, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.TEXT, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], 
locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ) + ) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new DateFormat(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); + } + + @Override + protected List argSpec() { + return List.of(required(strings()), required(DataTypes.DATETIME)); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.KEYWORD; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 540d1aa34474b..c7a1a945e079e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -62,6 +62,18 @@ public static Iterable parameters() { equalTo(1683244800000L) ) ), + new TestCaseSupplier( + "With Both Text", + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.TEXT, "second"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "first") + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + equalTo(1683244800000L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java new file mode 100644 index 0000000000000..49ad348047f45 --- /dev/null +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.ip; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_IP; +import static org.hamcrest.Matchers.equalTo; + +public class CIDRMatchTests extends AbstractScalarFunctionTestCase { + public CIDRMatchTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + + var suppliers = List.of( + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + 
equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new CIDRMatch(source, args.get(0), List.of(args.get(1))); + } + + @Override + protected List argSpec() { + return List.of(required(DataTypes.IP), required(strings())); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.BOOLEAN; + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 44d7290cbc002..c72315312afce 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -193,3 +193,64 @@ - match: { columns.0.type: double } - length: { values: 1 } - match: { values.0.0: 30.0 } + + + +--- +"text in functions #105379": + - skip: + version: " - 8.13.99" + reason: "fixes in 8.13 or later" + - do: + indices.create: + index: idx_with_date_ip_txt + body: + mappings: + properties: + id: + type: long + date: + type: date + ip: + type: ip + text: + type: text + text2: + type: text + + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 1, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text":"yyyy-MM-dd", "text2":"year" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 2, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text": "192.168.0.0/16" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 3, "date": "2024-03-22T14:50:00.000Z", "ip": "10.0.0.10", "text": "192.168.0.0/16" } + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' + - match: { columns.0.name: x } + - match: { columns.0.type: keyword } + - match: { columns.1.name: y } + - match: { columns.1.type: long } + - length: { values: 1 } + - match: { values.0.0: "2024-03-22" } + - match: { values.0.1: 2024 } + - match: { values.0.2: "2024-03-14T00:00:00.000Z" } + + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' + - match: { columns.0.name: id } + - match: { columns.0.type: long } + - match: { columns.1.name: x } + - match: { 
columns.1.type: boolean } + - length: { values: 2 } + - match: { values.0.0: 2 } + - match: { values.0.1: true } + - match: { values.1.0: 3 } + - match: { values.1.1: false } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index d73efe1788ce3..fba68760a162f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -366,6 +366,26 @@ setup: - match: { values.0: [ "Jenny - IT Director"] } - match: { values.1: [ "John - Payroll Specialist"] } +--- +"split text": + - skip: + version: " - 8.13.99" + reason: "functions fixed for text in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' + + - match: { columns.0.name: "split" } + - match: { columns.0.type: "keyword" } + + - length: { values: 2 } + - match: { values.0: [ ["foo", "bar"] ] } + - match: { values.1: [ "baz"] } + --- "stats text with raw": From 720188e95f8e29d78117d61b402fb2ce50be06c0 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 27 Mar 2024 12:14:02 +0100 Subject: [PATCH 206/214] Revert "ES|QL: Improve support for TEXT fields in functions (#106688)" This reverts commit 62e3e5fd1b6f8f3b9902dd8ff24da92f2517ae72. 
--- docs/changelog/106688.yaml | 5 - .../description/date_format.asciidoc | 7 -- .../functions/layout/date_format.asciidoc | 14 --- .../functions/parameters/date_format.asciidoc | 7 -- .../esql/functions/signature/date_format.svg | 1 - .../functions/types/date_extract.asciidoc | 1 - .../esql/functions/types/date_format.asciidoc | 10 -- .../esql/functions/types/date_parse.asciidoc | 1 - .../src/main/resources/meta.csv-spec | 18 +-- .../src/main/resources/string.csv-spec | 8 -- .../esql/expression/EsqlTypeResolutions.java | 9 -- .../function/scalar/date/DateExtract.java | 7 +- .../function/scalar/date/DateFormat.java | 19 ++-- .../function/scalar/date/DateParse.java | 7 +- .../function/scalar/ip/CIDRMatch.java | 11 +- .../function/scalar/string/Split.java | 5 +- .../function/AbstractFunctionTestCase.java | 1 - .../AbstractScalarFunctionTestCase.java | 3 - .../scalar/date/DateExtractTests.java | 12 -- .../function/scalar/date/DateFormatTests.java | 79 ------------- .../function/scalar/date/DateParseTests.java | 12 -- .../function/scalar/ip/CIDRMatchTests.java | 104 ------------------ .../rest-api-spec/test/esql/100_bug_fix.yml | 61 ---------- .../rest-api-spec/test/esql/80_text.yml | 20 ---- 24 files changed, 36 insertions(+), 386 deletions(-) delete mode 100644 docs/changelog/106688.yaml delete mode 100644 docs/reference/esql/functions/description/date_format.asciidoc delete mode 100644 docs/reference/esql/functions/layout/date_format.asciidoc delete mode 100644 docs/reference/esql/functions/parameters/date_format.asciidoc delete mode 100644 docs/reference/esql/functions/signature/date_format.svg delete mode 100644 docs/reference/esql/functions/types/date_format.asciidoc delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java diff --git 
a/docs/changelog/106688.yaml b/docs/changelog/106688.yaml deleted file mode 100644 index d72227101d610..0000000000000 --- a/docs/changelog/106688.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 106688 -summary: "ES|QL: Improve support for TEXT fields in functions" -area: ES|QL -type: bug -issues: [] diff --git a/docs/reference/esql/functions/description/date_format.asciidoc b/docs/reference/esql/functions/description/date_format.asciidoc deleted file mode 100644 index 82cdbecaa49ef..0000000000000 --- a/docs/reference/esql/functions/description/date_format.asciidoc +++ /dev/null @@ -1,7 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. - -*Description* - -Returns a string representation of a date, in the provided format. - -NOTE: diff --git a/docs/reference/esql/functions/layout/date_format.asciidoc b/docs/reference/esql/functions/layout/date_format.asciidoc deleted file mode 100644 index 1f9199afc812c..0000000000000 --- a/docs/reference/esql/functions/layout/date_format.asciidoc +++ /dev/null @@ -1,14 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
- -[discrete] -[[esql-date_format]] -=== `DATE_FORMAT` - -*Syntax* - -[.text-center] -image::esql/functions/signature/date_format.svg[Embedded,opts=inline] - -include::../parameters/date_format.asciidoc[] -include::../description/date_format.asciidoc[] -include::../types/date_format.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc deleted file mode 100644 index 7b000418b961c..0000000000000 --- a/docs/reference/esql/functions/parameters/date_format.asciidoc +++ /dev/null @@ -1,7 +0,0 @@ -*Parameters* - -`dateFormat`:: -A valid date pattern - -`date`:: -Date expression diff --git a/docs/reference/esql/functions/signature/date_format.svg b/docs/reference/esql/functions/signature/date_format.svg deleted file mode 100644 index 961fcff51d42b..0000000000000 --- a/docs/reference/esql/functions/signature/date_format.svg +++ /dev/null @@ -1 +0,0 @@ -DATE_FORMAT(dateFormat,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 43702ef0671a7..08bc0f6b51357 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -6,5 +6,4 @@ |=== datePart | date | result keyword | datetime | long -text | datetime | long |=== diff --git a/docs/reference/esql/functions/types/date_format.asciidoc b/docs/reference/esql/functions/types/date_format.asciidoc deleted file mode 100644 index a76f38653b9b8..0000000000000 --- a/docs/reference/esql/functions/types/date_format.asciidoc +++ /dev/null @@ -1,10 +0,0 @@ -// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
- -*Supported types* - -[%header.monospaced.styled,format=dsv,separator=|] -|=== -dateFormat | date | result -keyword | datetime | keyword -text | datetime | keyword -|=== diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index 82ae8253baa26..0d9e4b30c7c7b 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -7,5 +7,4 @@ datePattern | dateString | result keyword | keyword | datetime keyword | text | datetime -text | text | datetime |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 7d1617b208f34..cd94ae793516e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -12,7 +12,7 @@ auto_bucket |"double|date auto_bucket(field:integer|long|double|dat avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." | false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. 
If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false @@ -20,10 +20,10 @@ cosh |"double cosh(number:double|integer|long|unsigned_long) count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." 
| true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |"long date_extract(datePart:keyword|text, date:date)" |[datePart, date] |["keyword|text", date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |"keyword date_format(?dateFormat:keyword|text, date:date)" |[dateFormat, date] |["keyword|text", date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword|text, dateString:keyword|text)"|[datePattern, dateString]|["keyword|text", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |["keyword", date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +date_extract |long date_extract(datePart:keyword, date:date) |[datePart, date] |[keyword, date] |["Part of the date to extract. Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |keyword date_format(?dateFormat:keyword, date:date) |[dateFormat, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." 
| [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false e |double e() | null | null | null |double | "Euler’s number." | null | false | false ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." 
| false | false | false @@ -116,7 +116,7 @@ synopsis:keyword "double avg(number:double|integer|long)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" -"boolean cidr_match(ip:ip, blockX...:keyword|text)" +boolean cidr_match(ip:ip, blockX...:keyword) "boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" "keyword concat(string1:keyword|text, string2...:keyword|text)" "double cos(number:double|integer|long|unsigned_long)" @@ -124,9 +124,9 @@ synopsis:keyword "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -"long date_extract(datePart:keyword|text, date:date)" -"keyword date_format(?dateFormat:keyword|text, date:date)" -"date date_parse(?datePattern:keyword|text, dateString:keyword|text)" +long date_extract(datePart:keyword, date:date) +keyword date_format(?dateFormat:keyword, date:date) +"date date_parse(?datePattern:keyword, dateString:keyword|text)" "date date_trunc(interval:keyword, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index d9c9e535c2c45..06fca2682bbb9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1160,11 +1160,3 @@ required_feature: 
esql.agg_values null | null // end::values-grouped-result[] ; - - -splitBasedOnField -from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | keep gender, split; - -gender:keyword | split:keyword -M | [foo, bar] -; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index 85d5357d7c1ef..0379f1a5d3614 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -27,15 +27,6 @@ public class EsqlTypeResolutions { - public static Expression.TypeResolution isStringAndExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { - Expression.TypeResolution resolution = TypeResolutions.isString(e, operationName, paramOrd); - if (resolution.unresolved()) { - return resolution; - } - - return isExact(e, operationName, paramOrd); - } - public static Expression.TypeResolution isExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { if (e instanceof FieldAttribute fa) { if (DataTypes.isString(fa.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 544f284791919..4f31f73963569 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -29,10 +28,10 @@ import java.util.List; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateExtract extends EsqlConfigurationFunction { @@ -43,7 +42,7 @@ public DateExtract( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial - @Param(name = "datePart", type = { "keyword", "text" }, description = """ + @Param(name = "datePart", type = { "keyword" }, description = """ Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; @@ -77,7 +76,7 @@ private ChronoField chronoField() { if (chronoField == null) { Expression field = children().get(0); try { - if (field.foldable() && EsqlDataTypes.isString(field.dataType())) { + if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); } } catch (Exception e) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 6a6e523f81974..85e8a0f3aec47 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.session.Configuration; @@ -29,12 +28,12 @@ import java.util.Locale; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import 
static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { @@ -44,7 +43,7 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") public DateFormat( Source source, - @Param(optional = true, name = "dateFormat", type = { "keyword", "text" }, description = "A valid date pattern") Expression format, + @Param(optional = true, name = "dateFormat", type = { "keyword" }, description = "A valid date pattern") Expression format, @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, Configuration configuration ) { @@ -97,17 +96,23 @@ static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_DATE_TIME_FORMATTER, dvrCtx); } - if (EsqlDataTypes.isString(format.dataType()) == false) { + if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, formatter); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); } var formatEvaluator = 
toEvaluator.apply(format); - return new DateFormatEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, ((EsqlConfiguration) configuration()).locale()); + return dvrCtx -> new DateFormatEvaluator( + source(), + fieldEvaluator.get(dvrCtx), + formatEvaluator.get(dvrCtx), + ((EsqlConfiguration) configuration()).locale(), + dvrCtx + ); } private static DateFormatter toFormatter(Object format, Locale locale) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index b356dbccbeb4c..0bb9a5dde1959 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -15,7 +15,6 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -29,12 +28,12 @@ import java.util.function.Function; import static org.elasticsearch.common.time.DateFormatter.forPattern; -import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC; public class DateParse extends EsqlScalarFunction implements OptionalArgument { @@ -45,7 +44,7 @@ public class DateParse extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo(returnType = "date", description = "Parses a string into a date value") public DateParse( Source source, - @Param(name = "datePattern", type = { "keyword", "text" }, description = "A valid date pattern", optional = true) Expression first, + @Param(name = "datePattern", type = { "keyword" }, description = "A valid date pattern", optional = true) Expression first, @Param(name = "dateString", type = { "keyword", "text" }, description = "A string representing a date") Expression second ) { super(source, second != null ? List.of(first, second) : List.of(first)); @@ -100,7 +99,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function matches + @Param(name = "blockX", type = { "keyword" }, description = "CIDR block to test the IP against.") List matches ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; @@ -76,10 +76,11 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); - return new CIDRMatchEvaluator.Factory( + return dvrCtx -> new CIDRMatchEvaluator( source(), - ipEvaluatorSupplier, - matches.stream().map(x -> toEvaluator.apply(x)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) + ipEvaluatorSupplier.get(dvrCtx), + matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), + dvrCtx ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 611fc9947d3db..4ecc7fa1a96a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -25,9 +25,10 @@ import java.util.function.Function; -import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; /** * Splits a string on some delimiter into a multivalued string field. @@ -58,7 +59,7 @@ protected TypeResolution resolveType() { return resolution; } - return isStringAndExact(right(), sourceText(), SECOND); + return isString(right(), sourceText(), SECOND); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 78c1c57e07782..755e5fcf25b9b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -896,7 +896,6 @@ protected static String typeErrorMessage(boolean includeOrdinal, List validTypes) { if (withoutNull.equals(List.of(DataTypes.DATETIME))) { return "datetime"; } - if (withoutNull.equals(List.of(DataTypes.IP))) { - return "ip"; - } List negations = Stream.concat(Stream.of(numerics()), Stream.of(EsqlDataTypes.DATE_PERIOD, 
EsqlDataTypes.TIME_DURATION)) .sorted(Comparator.comparing(DataType::name)) .toList(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 1e2c24062b07a..3a6a5d8eabae3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -53,18 +53,6 @@ public static Iterable parameters() { equalTo(2023L) ) ), - new TestCaseSupplier( - List.of(DataTypes.TEXT, DataTypes.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.TEXT, "chrono"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") - ), - "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", - DataTypes.LONG, - equalTo(2023L) - ) - ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.DATETIME), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java deleted file mode 100644 index 3fa28c566649e..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.date; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; - -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; - -public class DateFormatTests extends AbstractScalarFunctionTestCase { - public DateFormatTests(@Name("TestCase") Supplier testCaseSupplier) { - this.testCase = testCaseSupplier.get(); - } - - @ParametersFactory - public static Iterable parameters() { - return parameterSuppliersFromTypedData( - List.of( - new TestCaseSupplier( - List.of(DataTypes.KEYWORD, DataTypes.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.KEYWORD, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") - ), - "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", - DataTypes.KEYWORD, - equalTo(BytesRefs.toBytesRef("2023")) - ) - ), - new TestCaseSupplier( - List.of(DataTypes.TEXT, DataTypes.DATETIME), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.TEXT, "formatter"), - new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") - ), - "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], 
locale=en_US]", - DataTypes.KEYWORD, - equalTo(BytesRefs.toBytesRef("2023")) - ) - ) - ) - ); - } - - @Override - protected Expression build(Source source, List args) { - return new DateFormat(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); - } - - @Override - protected List argSpec() { - return List.of(required(strings()), required(DataTypes.DATETIME)); - } - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.KEYWORD; - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index c7a1a945e079e..540d1aa34474b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -62,18 +62,6 @@ public static Iterable parameters() { equalTo(1683244800000L) ) ), - new TestCaseSupplier( - "With Both Text", - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.TEXT, "second"), - new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "first") - ), - "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", - DataTypes.DATETIME, - equalTo(1683244800000L) - ) - ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java deleted file mode 100644 index 49ad348047f45..0000000000000 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.function.scalar.ip; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; -import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; - -import java.util.List; -import java.util.function.Supplier; - -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_IP; -import static org.hamcrest.Matchers.equalTo; - -public class CIDRMatchTests extends AbstractScalarFunctionTestCase { - public CIDRMatchTests(@Name("TestCase") Supplier testCaseSupplier) { - this.testCase = testCaseSupplier.get(); - } - - @ParametersFactory - public static Iterable parameters() { - - var suppliers = List.of( - new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.KEYWORD), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.KEYWORD, "cidrs") - ), - "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - 
DataTypes.BOOLEAN, - equalTo(true) - ) - ), - new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.TEXT), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.TEXT, "cidrs") - ), - "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, - equalTo(true) - ) - ), - new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.KEYWORD), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.KEYWORD, "cidrs") - ), - "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, - equalTo(false) - ) - ), - new TestCaseSupplier( - List.of(DataTypes.IP, DataTypes.TEXT), - () -> new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(STRING_TO_IP.convert(BytesRefs.toBytesRef("192.168.0.10")), DataTypes.IP, "ip"), - new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.TEXT, "cidrs") - ), - "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", - DataTypes.BOOLEAN, - equalTo(false) - ) - ) - ); - - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); - } - - @Override - protected Expression build(Source source, List args) { - return new CIDRMatch(source, args.get(0), List.of(args.get(1))); - } - - @Override - protected List argSpec() { - return List.of(required(DataTypes.IP), required(strings())); - } - - @Override - protected DataType expectedType(List argTypes) { - return DataTypes.BOOLEAN; - } -} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index c72315312afce..44d7290cbc002 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -193,64 +193,3 @@ - match: { columns.0.type: double } - length: { values: 1 } - match: { values.0.0: 30.0 } - - - ---- -"text in functions #105379": - - skip: - version: " - 8.13.99" - reason: "fixes in 8.13 or later" - - do: - indices.create: - index: idx_with_date_ip_txt - body: - mappings: - properties: - id: - type: long - date: - type: date - ip: - type: ip - text: - type: text - text2: - type: text - - - do: - bulk: - refresh: true - body: - - { "index": { "_index": "idx_with_date_ip_txt" } } - - { "id": 1, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text":"yyyy-MM-dd", "text2":"year" } - - { "index": { "_index": "idx_with_date_ip_txt" } } - - { "id": 2, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text": "192.168.0.0/16" } - - { "index": { "_index": "idx_with_date_ip_txt" } } - - { "id": 3, "date": "2024-03-22T14:50:00.000Z", "ip": "10.0.0.10", "text": "192.168.0.0/16" } - - do: - esql.query: - body: - query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' - - match: { columns.0.name: x } - - match: { columns.0.type: keyword } - - match: { columns.1.name: y } - - match: { columns.1.type: long } - - length: { values: 1 } - - match: { values.0.0: "2024-03-22" } - - match: { values.0.1: 2024 } - - match: { values.0.2: "2024-03-14T00:00:00.000Z" } - - - do: - esql.query: - body: - query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' - - match: { columns.0.name: id } - - match: { columns.0.type: long } - - match: { columns.1.name: x } - - match: { 
columns.1.type: boolean } - - length: { values: 2 } - - match: { values.0.0: 2 } - - match: { values.0.1: true } - - match: { values.1.0: 3 } - - match: { values.1.1: false } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index fba68760a162f..d73efe1788ce3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -366,26 +366,6 @@ setup: - match: { values.0: [ "Jenny - IT Director"] } - match: { values.1: [ "John - Payroll Specialist"] } ---- -"split text": - - skip: - version: " - 8.13.99" - reason: "functions fixed for text in v 8.14" - features: allowed_warnings_regex - - do: - allowed_warnings_regex: - - "No limit defined, adding default limit of \\[.*\\]" - esql.query: - body: - query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' - - - match: { columns.0.name: "split" } - - match: { columns.0.type: "keyword" } - - - length: { values: 2 } - - match: { values.0: [ ["foo", "bar"] ] } - - match: { values.1: [ "baz"] } - --- "stats text with raw": From 2b6b7ae4743f104e8f6562ba1690e91c3e0b5c77 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 27 Mar 2024 08:34:28 -0400 Subject: [PATCH 207/214] [ML] Adding elementType to service settings and persisting byte instead of int8 (#106700) * Adding element type method and storing byte instead of int8 * Adding more tests and checking for null * Converting between element type and cohere embedding type * Update server/src/main/java/org/elasticsearch/inference/ServiceSettings.java Co-authored-by: David Kyle * enum tests --------- Co-authored-by: David Kyle --- .../org/elasticsearch/TransportVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 4 + .../inference/ServiceSettings.java | 10 +++ 
.../cohere/CohereEmbeddingsRequestEntity.java | 2 +- .../services/cohere/CohereService.java | 2 +- .../embeddings/CohereEmbeddingType.java | 84 ++++++++++++++++++- .../CohereEmbeddingsServiceSettings.java | 76 +++++++++-------- ...lingualE5SmallInternalServiceSettings.java | 6 ++ .../HuggingFaceServiceSettings.java | 6 ++ .../OpenAiEmbeddingsServiceSettings.java | 6 ++ .../CohereEmbeddingsRequestEntityTests.java | 16 ++++ .../cohere/CohereEmbeddingsRequestTests.java | 15 ++-- .../services/cohere/CohereServiceTests.java | 25 ++++-- .../embeddings/CohereEmbeddingTypeTests.java | 60 +++++++++++++ .../CohereEmbeddingsModelTests.java | 3 +- .../CohereEmbeddingsServiceSettingsTests.java | 66 ++++++++++----- 16 files changed, 308 insertions(+), 74 deletions(-) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c23d961119a74..c0970c9a6caa0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -152,6 +152,7 @@ static TransportVersion def(int id) { public static final TransportVersion KNN_QUERY_VECTOR_BUILDER = def(8_612_00_0); public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0); public static final TransportVersion ML_COMPLETION_INFERENCE_SERVICE_ADDED = def(8_614_00_0); + public static final TransportVersion ML_INFERENCE_EMBEDDING_BYTE_ADDED = def(8_615_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 22b8549e14969..73e29a98c1531 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -725,6 +725,10 @@ static Function errorByteElementsAppender(byte[] v } public abstract double computeDotProduct(VectorData vectorData); + + public static ElementType fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } } static final Map namesToElementType = Map.of( diff --git a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java index 2e745635d0fd9..6c1a01acb0dab 100644 --- a/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java +++ b/server/src/main/java/org/elasticsearch/inference/ServiceSettings.java @@ -9,6 +9,7 @@ package org.elasticsearch.inference; import org.elasticsearch.common.io.stream.VersionedNamedWriteable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.ToXContentObject; public interface ServiceSettings extends ToXContentObject, VersionedNamedWriteable { @@ -36,4 +37,13 @@ default Integer dimensions() { return null; } + /** + * The data type for the embeddings this service works with. 
Defaults to null, + * Text Embedding models should return a non-null value + * + * @return the element type + */ + default DenseVectorFieldMapper.ElementType elementType() { + return null; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 45c7372a4dc22..035bd44ebf405 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -57,7 +57,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } if (embeddingType != null) { - builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType)); + builder.field(EMBEDDING_TYPES_FIELD, List.of(embeddingType.toRequestString())); } if (taskSettings.getTruncation() != null) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 2bd88f6f01eb4..4ac6ec7220de4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -232,6 +232,6 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED; + return TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED; } } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java index 82d57cfb92381..8dbbbf7011e86 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingType.java @@ -7,7 +7,15 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; + +import java.util.Arrays; +import java.util.EnumSet; import java.util.Locale; +import java.util.Map; /** * Defines the type of embedding that the cohere api should return for a request. @@ -20,17 +28,48 @@ public enum CohereEmbeddingType { /** * Use this when you want to get back the default float embeddings. Valid for all models. */ - FLOAT, + FLOAT(DenseVectorFieldMapper.ElementType.FLOAT, RequestConstants.FLOAT), /** * Use this when you want to get back signed int8 embeddings. Valid for only v3 models. 
*/ - INT8; + INT8(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8), + /** + * This is a synonym for INT8 + */ + BYTE(DenseVectorFieldMapper.ElementType.BYTE, RequestConstants.INT8); + + private static final class RequestConstants { + private static final String FLOAT = "float"; + private static final String INT8 = "int8"; + } + + private static final Map ELEMENT_TYPE_TO_COHERE_EMBEDDING = Map.of( + DenseVectorFieldMapper.ElementType.FLOAT, + FLOAT, + DenseVectorFieldMapper.ElementType.BYTE, + BYTE + ); + static final EnumSet SUPPORTED_ELEMENT_TYPES = EnumSet.copyOf( + ELEMENT_TYPE_TO_COHERE_EMBEDDING.keySet() + ); + + private final DenseVectorFieldMapper.ElementType elementType; + private final String requestString; + + CohereEmbeddingType(DenseVectorFieldMapper.ElementType elementType, String requestString) { + this.elementType = elementType; + this.requestString = requestString; + } @Override public String toString() { return name().toLowerCase(Locale.ROOT); } + public String toRequestString() { + return requestString; + } + public static String toLowerCase(CohereEmbeddingType type) { return type.toString().toLowerCase(Locale.ROOT); } @@ -38,4 +77,45 @@ public static String toLowerCase(CohereEmbeddingType type) { public static CohereEmbeddingType fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } + + public static CohereEmbeddingType fromElementType(DenseVectorFieldMapper.ElementType elementType) { + var embedding = ELEMENT_TYPE_TO_COHERE_EMBEDDING.get(elementType); + + if (embedding == null) { + var validElementTypes = SUPPORTED_ELEMENT_TYPES.stream() + .map(value -> value.toString().toLowerCase(Locale.ROOT)) + .toArray(String[]::new); + Arrays.sort(validElementTypes); + + throw new IllegalArgumentException( + Strings.format( + "Element type [%s] does not map to a Cohere embedding value, must be one of [%s]", + elementType, + String.join(", ", validElementTypes) + ) + ); + } + + return embedding; + } + + public 
DenseVectorFieldMapper.ElementType toElementType() { + return elementType; + } + + /** + * Returns an embedding type that is known based on the transport version provided. If the embedding type enum was not yet + * introduced it will be defaulted INT8. + * + * @param embeddingType the value to translate if necessary + * @param version the version that dictates the translation + * @return the embedding type that is known to the version passed in + */ + public static CohereEmbeddingType translateToVersion(CohereEmbeddingType embeddingType, TransportVersion version) { + if (version.before(TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED) && embeddingType == BYTE) { + return INT8; + } + + return embeddingType; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index a8ae8aa8d7fdd..22f652e73526f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; @@ -22,32 +22,21 @@ import java.io.IOException; import java.util.EnumSet; -import java.util.Locale; import java.util.Map; import java.util.Objects; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; public class CohereEmbeddingsServiceSettings implements ServiceSettings { public static final String NAME = "cohere_embeddings_service_settings"; static final String EMBEDDING_TYPE = "embedding_type"; - static final String EMBEDDING_TYPE_BYTE = "byte"; public static CohereEmbeddingsServiceSettings fromMap(Map map, ConfigurationParseContext context) { ValidationException validationException = new ValidationException(); var commonServiceSettings = CohereServiceSettings.fromMap(map, context); - translateEmbeddingType(map, context); - - CohereEmbeddingType embeddingTypes = extractOptionalEnum( - map, - EMBEDDING_TYPE, - ModelConfigurations.SERVICE_SETTINGS, - CohereEmbeddingType::fromString, - EnumSet.allOf(CohereEmbeddingType.class), - validationException - ); + + CohereEmbeddingType embeddingTypes = parseEmbeddingType(map, context, validationException); if (validationException.validationErrors().isEmpty() == false) { throw validationException; @@ -56,37 +45,51 @@ public static CohereEmbeddingsServiceSettings fromMap(Map map, C return new CohereEmbeddingsServiceSettings(commonServiceSettings, embeddingTypes); } - private static void translateEmbeddingType(Map map, ConfigurationParseContext context) { - if (ConfigurationParseContext.isRequestContext(context) == false || map.containsKey(EMBEDDING_TYPE) == false) { - return; + private static CohereEmbeddingType parseEmbeddingType( + Map map, + ConfigurationParseContext context, + ValidationException validationException + ) { + if (context == ConfigurationParseContext.REQUEST) { + return Objects.requireNonNullElse( + extractOptionalEnum( + map, + EMBEDDING_TYPE, + ModelConfigurations.SERVICE_SETTINGS, + CohereEmbeddingType::fromString, + EnumSet.allOf(CohereEmbeddingType.class), + validationException + ), + CohereEmbeddingType.FLOAT + ); } - 
ValidationException validationException = new ValidationException(); - - String embeddingType = extractRequiredString(map, EMBEDDING_TYPE, ModelConfigurations.SERVICE_SETTINGS, validationException); - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } + DenseVectorFieldMapper.ElementType elementType = Objects.requireNonNullElse( + extractOptionalEnum( + map, + EMBEDDING_TYPE, + ModelConfigurations.SERVICE_SETTINGS, + DenseVectorFieldMapper.ElementType::fromString, + CohereEmbeddingType.SUPPORTED_ELEMENT_TYPES, + validationException + ), + DenseVectorFieldMapper.ElementType.FLOAT + ); - assert embeddingType != null; - if (embeddingType.toLowerCase(Locale.ROOT).equals(EMBEDDING_TYPE_BYTE)) { - map.put(EMBEDDING_TYPE, CohereEmbeddingType.INT8.toString()); - } else { - map.put(EMBEDDING_TYPE, embeddingType); - } + return CohereEmbeddingType.fromElementType(elementType); } private final CohereServiceSettings commonSettings; private final CohereEmbeddingType embeddingType; - public CohereEmbeddingsServiceSettings(CohereServiceSettings commonSettings, @Nullable CohereEmbeddingType embeddingType) { + public CohereEmbeddingsServiceSettings(CohereServiceSettings commonSettings, CohereEmbeddingType embeddingType) { this.commonSettings = commonSettings; - this.embeddingType = embeddingType; + this.embeddingType = Objects.requireNonNull(embeddingType); } public CohereEmbeddingsServiceSettings(StreamInput in) throws IOException { commonSettings = new CohereServiceSettings(in); - embeddingType = in.readOptionalEnum(CohereEmbeddingType.class); + embeddingType = Objects.requireNonNullElse(in.readOptionalEnum(CohereEmbeddingType.class), CohereEmbeddingType.FLOAT); } public CohereServiceSettings getCommonSettings() { @@ -97,6 +100,11 @@ public CohereEmbeddingType getEmbeddingType() { return embeddingType; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return embeddingType == null ? 
DenseVectorFieldMapper.ElementType.FLOAT : embeddingType.toElementType(); + } + @Override public String getWriteableName() { return NAME; @@ -107,7 +115,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); commonSettings.toXContentFragment(builder); - builder.field(EMBEDDING_TYPE, embeddingType); + builder.field(EMBEDDING_TYPE, elementType()); builder.endObject(); return builder; @@ -126,7 +134,7 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { commonSettings.writeTo(out); - out.writeOptionalEnum(embeddingType); + out.writeOptionalEnum(CohereEmbeddingType.translateToVersion(embeddingType, out.getTransportVersion())); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 5e93c1a46f796..4445c5674277f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; @@ -106,4 +107,9 @@ public void writeTo(StreamOutput out) throws IOException { public Integer dimensions() { return 384; } + + 
@Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index f176cf7580567..b9c412fc425c6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; @@ -156,6 +157,11 @@ public Integer maxInputTokens() { return maxInputTokens; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 01aa4f51799fb..c821039e3fc76 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ 
-13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.SimilarityMeasure; @@ -211,6 +212,11 @@ public String modelId() { return modelId; } + @Override + public DenseVectorFieldMapper.ElementType elementType() { + return DenseVectorFieldMapper.ElementType.FLOAT; + } + @Override public String getWriteableName() { return NAME; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java index 2d3ff25222ab9..0690bf56893ca 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntityTests.java @@ -56,6 +56,22 @@ public void testXContent_InputTypeSearch_EmbeddingTypesInt8_TruncateNone() throw {"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); } + public void testXContent_InputTypeSearch_EmbeddingTypesByte_TruncateNone() throws IOException { + var entity = new CohereEmbeddingsRequestEntity( + List.of("abc"), + new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE), + "model", + CohereEmbeddingType.BYTE + ); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + MatcherAssert.assertThat(xContentResult, is(""" + 
{"texts":["abc"],"model":"model","input_type":"search_query","embedding_types":["int8"],"truncate":"none"}""")); + } + public void testXContent_WritesNoOptionalFields_WhenTheyAreNotDefined() throws IOException { var entity = new CohereEmbeddingsRequestEntity(List.of("abc"), CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index d3783f6fed76b..d106274280ed5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -21,7 +21,6 @@ import org.hamcrest.MatcherAssert; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import java.util.Map; @@ -30,7 +29,7 @@ import static org.hamcrest.Matchers.is; public class CohereEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_UrlDefined() throws URISyntaxException, IOException { + public void testCreateRequest_UrlDefined() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel("url", "secret", CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, null, null, null, null) @@ -50,10 +49,10 @@ public void testCreateRequest_UrlDefined() throws URISyntaxException, IOExceptio ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc")))); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "embedding_types", List.of("float")))); } - public void testCreateRequest_AllOptionsDefined() throws URISyntaxException, IOException { + public void 
testCreateRequest_AllOptionsDefined() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel( @@ -100,7 +99,7 @@ public void testCreateRequest_AllOptionsDefined() throws URISyntaxException, IOE ); } - public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() throws URISyntaxException, IOException { + public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel( @@ -147,7 +146,7 @@ public void testCreateRequest_InputTypeSearch_EmbeddingTypeInt8_TruncateEnd() th ); } - public void testCreateRequest_TruncateNone() throws URISyntaxException, IOException { + public void testCreateRequest_TruncateNone() throws IOException { var request = createRequest( List.of("abc"), CohereEmbeddingsModelTests.createModel( @@ -175,10 +174,10 @@ public void testCreateRequest_TruncateNone() throws URISyntaxException, IOExcept ); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none"))); + MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "truncate", "none", "embedding_types", List.of("float")))); } - public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) throws URISyntaxException { + public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) { var account = new CohereAccount(model.getServiceSettings().getCommonSettings().getUri(), model.getSecretSettings().apiKey()); return new CohereEmbeddingsRequest(account, input, model); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index dae4c20d00d78..fa21cd9bf7841 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -353,7 +354,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWit public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.INT8), + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", DenseVectorFieldMapper.ElementType.BYTE), getTaskSettingsMap(InputType.SEARCH, CohereTruncation.NONE), getSecretSettingsMap("secret") ); @@ -371,7 +372,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.INT8)); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.BYTE)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, CohereTruncation.NONE)) @@ -684,7 
+685,10 @@ public void testInfer_SendsRequest() throws IOException { MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -838,7 +842,10 @@ public void testInfer_SetsInputTypeToIngest_FromInferParameter_WhenTaskSettingsA MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -905,7 +912,10 @@ public void testInfer_SetsInputTypeToIngestFromInferParameter_WhenModelSettingIs MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "input_type", "search_document", "embedding_types", List.of("float"))) + ); } } @@ -965,7 +975,10 @@ public void testInfer_DoesNotSetInputType_WhenNotPresentInTaskSettings_AndUnspec MatcherAssert.assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); var requestMap = 
entityAsMap(webServer.requests().get(0).getBody()); - MatcherAssert.assertThat(requestMap, is(Map.of("texts", List.of("abc"), "model", "model"))); + MatcherAssert.assertThat( + requestMap, + is(Map.of("texts", List.of("abc"), "model", "model", "embedding_types", List.of("float"))) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java new file mode 100644 index 0000000000000..ed13e5a87e71b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingTypeTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.cohere.embeddings; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class CohereEmbeddingTypeTests extends ESTestCase { + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingByte() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BYTE, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsInt8_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingInt8() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.INT8, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.INT8) + ); + } + + public void testTranslateToVersion_ReturnsFloat_WhenVersionIsBeforeByteEnumAddition_WhenSpecifyingFloat() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.FLOAT, new TransportVersion(8_613_00_0)), + is(CohereEmbeddingType.FLOAT) + ); + } + + public void testTranslateToVersion_ReturnsByte_WhenVersionOnByteEnumAddition_WhenSpecifyingByte() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.BYTE, TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED), + is(CohereEmbeddingType.BYTE) + ); + } + + public void testTranslateToVersion_ReturnsFloat_WhenVersionOnByteEnumAddition_WhenSpecifyingFloat() { + assertThat( + CohereEmbeddingType.translateToVersion(CohereEmbeddingType.FLOAT, TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED), + is(CohereEmbeddingType.FLOAT) + ); + } + + public void testFromElementType_CovertsFloatToCohereEmbeddingTypeFloat() { + assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.FLOAT), is(CohereEmbeddingType.FLOAT)); + } + + public void 
testFromElementType_CovertsByteToCohereEmbeddingTypeByte() { + assertThat(CohereEmbeddingType.fromElementType(DenseVectorFieldMapper.ElementType.BYTE), is(CohereEmbeddingType.BYTE)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java index ec36ac5ce58d5..194b71f9ea32a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -18,6 +18,7 @@ import org.hamcrest.MatcherAssert; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.hamcrest.Matchers.is; @@ -218,7 +219,7 @@ public static CohereEmbeddingsModel createModel( "service", new CohereEmbeddingsServiceSettings( new CohereServiceSettings(url, SimilarityMeasure.DOT_PRODUCT, dimensions, tokenLimit, model), - embeddingType + Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) ), taskSettings, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 41906cca15fe9..f6419c9405e4b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; @@ -37,7 +38,7 @@ public class CohereEmbeddingsServiceSettingsTests extends AbstractWireSerializingTestCase { public static CohereEmbeddingsServiceSettings createRandom() { var commonSettings = CohereServiceSettingsTests.createRandom(); - var embeddingType = randomBoolean() ? randomFrom(CohereEmbeddingType.values()) : null; + var embeddingType = randomFrom(CohereEmbeddingType.values()); return new CohereEmbeddingsServiceSettings(commonSettings, embeddingType); } @@ -62,7 +63,7 @@ public void testFromMap() { CohereServiceSettings.OLD_MODEL_ID_FIELD, model, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, - CohereEmbeddingType.INT8.toString() + DenseVectorFieldMapper.ElementType.BYTE.toString() ) ), ConfigurationParseContext.PERSISTENT @@ -73,7 +74,7 @@ public void testFromMap() { is( new CohereEmbeddingsServiceSettings( new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), - CohereEmbeddingType.INT8 + CohereEmbeddingType.BYTE ) ) ); @@ -102,7 +103,7 @@ public void testFromMap_WithModelId() { CohereEmbeddingType.INT8.toString() ) ), - ConfigurationParseContext.PERSISTENT + ConfigurationParseContext.REQUEST ); MatcherAssert.assertThat( @@ -138,7 +139,7 @@ public void testFromMap_PrefersModelId_OverModel() { CohereServiceSettings.MODEL_ID, model, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, - 
CohereEmbeddingType.INT8.toString() + CohereEmbeddingType.BYTE.toString() ) ), ConfigurationParseContext.PERSISTENT @@ -149,15 +150,15 @@ public void testFromMap_PrefersModelId_OverModel() { is( new CohereEmbeddingsServiceSettings( new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), - CohereEmbeddingType.INT8 + CohereEmbeddingType.BYTE ) ) ); } - public void testFromMap_MissingEmbeddingType_DoesNotThrowException() { + public void testFromMap_MissingEmbeddingType_DefaultsToFloat() { var serviceSettings = CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of()), ConfigurationParseContext.PERSISTENT); - assertNull(serviceSettings.getEmbeddingType()); + assertThat(serviceSettings.getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); } public void testFromMap_EmptyEmbeddingType_ThrowsError() { @@ -180,12 +181,12 @@ public void testFromMap_EmptyEmbeddingType_ThrowsError() { ); } - public void testFromMap_InvalidEmbeddingType_ThrowsError() { + public void testFromMap_InvalidEmbeddingType_ThrowsError_ForRequest() { var thrownException = expectThrows( ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc")), - ConfigurationParseContext.PERSISTENT + ConfigurationParseContext.REQUEST ) ); @@ -193,17 +194,18 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError() { thrownException.getMessage(), is( Strings.format( - "Validation Failed: 1: [service_settings] Invalid value [abc] received. [embedding_type] must be one of [float, int8];" + "Validation Failed: 1: [service_settings] Invalid value [abc] received. 
" + + "[embedding_type] must be one of [byte, float, int8];" ) ) ); } - public void testFromMap_InvalidEmbeddingType_ThrowsError_WhenByteFromPersistedConfig() { + public void testFromMap_InvalidEmbeddingType_ThrowsError_ForPersistent() { var thrownException = expectThrows( ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE_BYTE)), + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc")), ConfigurationParseContext.PERSISTENT ) ); @@ -212,7 +214,8 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_WhenByteFromPersistedCo thrownException.getMessage(), is( Strings.format( - "Validation Failed: 1: [service_settings] Invalid value [byte] received. [embedding_type] must be one of [float, int8];" + "Validation Failed: 1: [service_settings] Invalid value [abc] received. " + + "[embedding_type] must be one of [byte, float];" ) ) ); @@ -233,10 +236,35 @@ public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { ); } - public void testFromMap_ConvertsCohereEmbeddingType_FromByteToInt8() { + public void testFromMap_ConvertsElementTypeByte_ToCohereEmbeddingTypeByte() { + assertThat( + CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, DenseVectorFieldMapper.ElementType.BYTE.toString())), + ConfigurationParseContext.PERSISTENT + ), + is(new CohereEmbeddingsServiceSettings(new CohereServiceSettings((URI) null, null, null, null, null), CohereEmbeddingType.BYTE)) + ); + } + + public void testFromMap_ConvertsElementTypeFloat_ToCohereEmbeddingTypeFloat() { + assertThat( + CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, DenseVectorFieldMapper.ElementType.FLOAT.toString())), + ConfigurationParseContext.PERSISTENT + ), + is( + new CohereEmbeddingsServiceSettings( + new 
CohereServiceSettings((URI) null, null, null, null, null), + CohereEmbeddingType.FLOAT + ) + ) + ); + } + + public void testFromMap_ConvertsInt8_ToCohereEmbeddingTypeInt8() { assertThat( CohereEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE_BYTE)), + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingType.INT8.toString())), ConfigurationParseContext.REQUEST ), is(new CohereEmbeddingsServiceSettings(new CohereServiceSettings((URI) null, null, null, null, null), CohereEmbeddingType.INT8)) @@ -281,11 +309,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry(entries); } - public static Map getServiceSettingsMap( - @Nullable String url, - @Nullable String model, - @Nullable CohereEmbeddingType embeddingType - ) { + public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model, @Nullable Enum embeddingType) { var map = new HashMap<>(CohereServiceSettingsTests.getServiceSettingsMap(url, model)); if (embeddingType != null) { From 3e406e2d5714dd553f4037866ae9d8f4116c8030 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 27 Mar 2024 13:47:09 +0100 Subject: [PATCH 208/214] ES|QL: Improve support for TEXT fields in functions (#106810) Re-submitting https://github.com/elastic/elasticsearch/pull/106688 after a revert due to a conflict after merge --- docs/changelog/106810.yaml | 5 + .../description/date_format.asciidoc | 5 + .../functions/layout/date_format.asciidoc | 14 +++ .../functions/parameters/date_format.asciidoc | 7 ++ .../esql/functions/signature/date_format.svg | 1 + .../functions/types/date_extract.asciidoc | 1 + .../esql/functions/types/date_format.asciidoc | 10 ++ .../esql/functions/types/date_parse.asciidoc | 1 + .../src/main/resources/meta.csv-spec | 18 +-- .../src/main/resources/string.csv-spec | 8 ++ .../esql/expression/EsqlTypeResolutions.java | 
9 ++ .../function/scalar/date/DateExtract.java | 7 +- .../function/scalar/date/DateFormat.java | 19 ++-- .../function/scalar/date/DateParse.java | 7 +- .../function/scalar/ip/CIDRMatch.java | 11 +- .../function/scalar/string/Split.java | 5 +- .../function/AbstractFunctionTestCase.java | 1 + .../AbstractScalarFunctionTestCase.java | 3 + .../scalar/date/DateExtractTests.java | 12 ++ .../function/scalar/date/DateFormatTests.java | 79 ++++++++++++++ .../function/scalar/date/DateParseTests.java | 12 ++ .../function/scalar/ip/CIDRMatchTests.java | 103 ++++++++++++++++++ .../rest-api-spec/test/esql/100_bug_fix.yml | 61 +++++++++++ .../rest-api-spec/test/esql/80_text.yml | 20 ++++ 24 files changed, 383 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/106810.yaml create mode 100644 docs/reference/esql/functions/description/date_format.asciidoc create mode 100644 docs/reference/esql/functions/layout/date_format.asciidoc create mode 100644 docs/reference/esql/functions/parameters/date_format.asciidoc create mode 100644 docs/reference/esql/functions/signature/date_format.svg create mode 100644 docs/reference/esql/functions/types/date_format.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java diff --git a/docs/changelog/106810.yaml b/docs/changelog/106810.yaml new file mode 100644 index 0000000000000..e93e5cf1e5361 --- /dev/null +++ b/docs/changelog/106810.yaml @@ -0,0 +1,5 @@ +pr: 106810 +summary: "ES|QL: Improve support for TEXT fields in functions" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/description/date_format.asciidoc b/docs/reference/esql/functions/description/date_format.asciidoc new file mode 100644 index 0000000000000..ef9873bdeffe6 --- /dev/null +++ 
b/docs/reference/esql/functions/description/date_format.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns a string representation of a date, in the provided format. diff --git a/docs/reference/esql/functions/layout/date_format.asciidoc b/docs/reference/esql/functions/layout/date_format.asciidoc new file mode 100644 index 0000000000000..1f9199afc812c --- /dev/null +++ b/docs/reference/esql/functions/layout/date_format.asciidoc @@ -0,0 +1,14 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-date_format]] +=== `DATE_FORMAT` + +*Syntax* + +[.text-center] +image::esql/functions/signature/date_format.svg[Embedded,opts=inline] + +include::../parameters/date_format.asciidoc[] +include::../description/date_format.asciidoc[] +include::../types/date_format.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc new file mode 100644 index 0000000000000..7b000418b961c --- /dev/null +++ b/docs/reference/esql/functions/parameters/date_format.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`dateFormat`:: +A valid date pattern + +`date`:: +Date expression diff --git a/docs/reference/esql/functions/signature/date_format.svg b/docs/reference/esql/functions/signature/date_format.svg new file mode 100644 index 0000000000000..961fcff51d42b --- /dev/null +++ b/docs/reference/esql/functions/signature/date_format.svg @@ -0,0 +1 @@ +DATE_FORMAT(dateFormat,date) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/date_extract.asciidoc b/docs/reference/esql/functions/types/date_extract.asciidoc index 08bc0f6b51357..43702ef0671a7 100644 --- a/docs/reference/esql/functions/types/date_extract.asciidoc +++ b/docs/reference/esql/functions/types/date_extract.asciidoc @@ -6,4 +6,5 
@@ |=== datePart | date | result keyword | datetime | long +text | datetime | long |=== diff --git a/docs/reference/esql/functions/types/date_format.asciidoc b/docs/reference/esql/functions/types/date_format.asciidoc new file mode 100644 index 0000000000000..a76f38653b9b8 --- /dev/null +++ b/docs/reference/esql/functions/types/date_format.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +dateFormat | date | result +keyword | datetime | keyword +text | datetime | keyword +|=== diff --git a/docs/reference/esql/functions/types/date_parse.asciidoc b/docs/reference/esql/functions/types/date_parse.asciidoc index 0d9e4b30c7c7b..82ae8253baa26 100644 --- a/docs/reference/esql/functions/types/date_parse.asciidoc +++ b/docs/reference/esql/functions/types/date_parse.asciidoc @@ -7,4 +7,5 @@ datePattern | dateString | result keyword | keyword | datetime keyword | text | datetime +text | text | datetime |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index cd94ae793516e..7d1617b208f34 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -12,7 +12,7 @@ auto_bucket |"double|date auto_bucket(field:integer|long|double|dat avg |"double avg(number:double|integer|long)" |number |"double|integer|long" | "" |double | "The average of a numeric field." 
| false | false | true case |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |[condition, trueValue] |["boolean", "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true." | [false, false] | true | false ceil |"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." | "double|integer|long|unsigned_long" | "Round a number up to the nearest integer." | false | false | false -cidr_match |boolean cidr_match(ip:ip, blockX...:keyword) |[ip, blockX] |[ip, keyword] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false +cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." | [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." 
| false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false @@ -20,10 +20,10 @@ cosh |"double cosh(number:double|integer|long|unsigned_long) count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." | true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false -date_extract |long date_extract(datePart:keyword, date:date) |[datePart, date] |[keyword, date] |["Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false -date_format |keyword date_format(?dateFormat:keyword, date:date) |[dateFormat, date] |[keyword, date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false -date_parse |"date date_parse(?datePattern:keyword, dateString:keyword|text)"|[datePattern, dateString]|["keyword", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false -date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |[keyword, date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false +date_extract |"long date_extract(datePart:keyword|text, date:date)" |[datePart, date] |["keyword|text", date] |["Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; day_of_year; epoch_day; era; hour_of_ampm; hour_of_day; instant_seconds; micro_of_day; micro_of_second; milli_of_day; milli_of_second; minute_of_day; minute_of_hour; month_of_year; nano_of_day; nano_of_second; offset_seconds; proleptic_month; second_of_day; second_of_minute; year; or year_of_era.", "Date expression"] |long | "Extracts parts of a date, like year, month, day, hour." | [false, false] | false | false +date_format |"keyword date_format(?dateFormat:keyword|text, date:date)" |[dateFormat, date] |["keyword|text", date] |["A valid date pattern", "Date expression"] |keyword | "Returns a string representation of a date, in the provided format." | [true, false] | false | false +date_parse |"date date_parse(?datePattern:keyword|text, dateString:keyword|text)"|[datePattern, dateString]|["keyword|text", "keyword|text"]|["A valid date pattern", "A string representing a date"]|date |Parses a string into a date value | [true, false] | false | false +date_trunc |"date date_trunc(interval:keyword, date:date)" |[interval, date] |["keyword", date] |["Interval; expressed using the timespan literal syntax.", "Date expression"] |date | "Rounds down a date to the closest interval." | [false, false] | false | false e |double e() | null | null | null |double | "Euler’s number." | null | false | false ends_with |"boolean ends_with(str:keyword|text, suffix:keyword|text)" |[str, suffix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string ends with another string" | [false, false] | false | false floor |"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Round a number down to the nearest integer." 
| false | false | false @@ -116,7 +116,7 @@ synopsis:keyword "double avg(number:double|integer|long)" "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" -boolean cidr_match(ip:ip, blockX...:keyword) +"boolean cidr_match(ip:ip, blockX...:keyword|text)" "boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" "keyword concat(string1:keyword|text, string2...:keyword|text)" "double cos(number:double|integer|long|unsigned_long)" @@ -124,9 +124,9 @@ boolean cidr_match(ip:ip, blockX...:keyword) "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -long date_extract(datePart:keyword, date:date) -keyword date_format(?dateFormat:keyword, date:date) -"date date_parse(?datePattern:keyword, dateString:keyword|text)" +"long date_extract(datePart:keyword|text, date:date)" +"keyword date_format(?dateFormat:keyword|text, date:date)" +"date date_parse(?datePattern:keyword|text, dateString:keyword|text)" "date date_trunc(interval:keyword, date:date)" double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 06fca2682bbb9..d9c9e535c2c45 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -1160,3 +1160,11 
@@ required_feature: esql.agg_values null | null // end::values-grouped-result[] ; + + +splitBasedOnField +from employees | where emp_no == 10001 | eval split = split("fooMbar", gender) | keep gender, split; + +gender:keyword | split:keyword +M | [foo, bar] +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index 0379f1a5d3614..85d5357d7c1ef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -27,6 +27,15 @@ public class EsqlTypeResolutions { + public static Expression.TypeResolution isStringAndExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { + Expression.TypeResolution resolution = TypeResolutions.isString(e, operationName, paramOrd); + if (resolution.unresolved()) { + return resolution; + } + + return isExact(e, operationName, paramOrd); + } + public static Expression.TypeResolution isExact(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { if (e instanceof FieldAttribute fa) { if (DataTypes.isString(fa.dataType())) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 4f31f73963569..544f284791919 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import 
org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -28,10 +29,10 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.EsqlConverter.STRING_TO_CHRONO_FIELD; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.chronoToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateExtract extends EsqlConfigurationFunction { @@ -42,7 +43,7 @@ public DateExtract( Source source, // Need to replace the commas in the description here with semi-colon as there's a bug in the CSV parser // used in the CSVTests and fixing it is not trivial - @Param(name = "datePart", type = { "keyword" }, description = """ + @Param(name = "datePart", type = { "keyword", "text" }, description = """ Part of the date to extract. 
Can be: aligned_day_of_week_in_month; aligned_day_of_week_in_year; aligned_week_of_month; aligned_week_of_year; ampm_of_day; clock_hour_of_ampm; clock_hour_of_day; day_of_month; day_of_week; @@ -76,7 +77,7 @@ private ChronoField chronoField() { if (chronoField == null) { Expression field = children().get(0); try { - if (field.foldable() && field.dataType() == DataTypes.KEYWORD) { + if (field.foldable() && EsqlDataTypes.isString(field.dataType())) { chronoField = (ChronoField) STRING_TO_CHRONO_FIELD.convert(field.fold()); } } catch (Exception e) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 85e8a0f3aec47..6a6e523f81974 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; import org.elasticsearch.xpack.ql.session.Configuration; @@ -28,12 +29,12 @@ import java.util.Locale; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import 
static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; public class DateFormat extends EsqlConfigurationFunction implements OptionalArgument { @@ -43,7 +44,7 @@ public class DateFormat extends EsqlConfigurationFunction implements OptionalArg @FunctionInfo(returnType = "keyword", description = "Returns a string representation of a date, in the provided format.") public DateFormat( Source source, - @Param(optional = true, name = "dateFormat", type = { "keyword" }, description = "A valid date pattern") Expression format, + @Param(optional = true, name = "dateFormat", type = { "keyword", "text" }, description = "A valid date pattern") Expression format, @Param(name = "date", type = { "date" }, description = "Date expression") Expression date, Configuration configuration ) { @@ -96,23 +97,17 @@ static BytesRef process(long val, BytesRef formatter, @Fixed Locale locale) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_DATE_TIME_FORMATTER, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_DATE_TIME_FORMATTER); } - if (format.dataType() != DataTypes.KEYWORD) { + if (EsqlDataTypes.isString(format.dataType()) == false) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return new DateFormatConstantEvaluator.Factory(source(), fieldEvaluator, formatter); } var formatEvaluator = 
toEvaluator.apply(format); - return dvrCtx -> new DateFormatEvaluator( - source(), - fieldEvaluator.get(dvrCtx), - formatEvaluator.get(dvrCtx), - ((EsqlConfiguration) configuration()).locale(), - dvrCtx - ); + return new DateFormatEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, ((EsqlConfiguration) configuration()).locale()); } private static DateFormatter toFormatter(Object format, Locale locale) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 0bb9a5dde1959..b356dbccbeb4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; @@ -28,12 +29,12 @@ import java.util.function.Function; import static org.elasticsearch.common.time.DateFormatter.forPattern; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static 
org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC; public class DateParse extends EsqlScalarFunction implements OptionalArgument { @@ -44,7 +45,7 @@ public class DateParse extends EsqlScalarFunction implements OptionalArgument { @FunctionInfo(returnType = "date", description = "Parses a string into a date value") public DateParse( Source source, - @Param(name = "datePattern", type = { "keyword" }, description = "A valid date pattern", optional = true) Expression first, + @Param(name = "datePattern", type = { "keyword", "text" }, description = "A valid date pattern", optional = true) Expression first, @Param(name = "dateString", type = { "keyword", "text" }, description = "A string representing a date") Expression second ) { super(source, second != null ? List.of(first, second) : List.of(first)); @@ -99,7 +100,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function matches + @Param(name = "blockX", type = { "keyword", "text" }, description = "CIDR block to test the IP against.") List matches ) { super(source, CollectionUtils.combine(singletonList(ipField), matches)); this.ipField = ipField; @@ -76,11 +76,10 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); - return dvrCtx -> new CIDRMatchEvaluator( + return new CIDRMatchEvaluator.Factory( source(), - ipEvaluatorSupplier.get(dvrCtx), - matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx + ipEvaluatorSupplier, + matches.stream().map(x -> toEvaluator.apply(x)).toArray(EvalOperator.ExpressionEvaluator.Factory[]::new) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 4ecc7fa1a96a7..611fc9947d3db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -25,10 +25,9 @@ import java.util.function.Function; +import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isStringAndExact; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isString; -import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; /** * Splits a string on some delimiter into a multivalued string field. @@ -59,7 +58,7 @@ protected TypeResolution resolveType() { return resolution; } - return isString(right(), sourceText(), SECOND); + return isStringAndExact(right(), sourceText(), SECOND); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 755e5fcf25b9b..78c1c57e07782 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -896,6 +896,7 @@ protected static String typeErrorMessage(boolean includeOrdinal, List validTypes) { if (withoutNull.equals(List.of(DataTypes.DATETIME))) { return "datetime"; } + if (withoutNull.equals(List.of(DataTypes.IP))) { + return "ip"; + } List negations = Stream.concat(Stream.of(numerics()), Stream.of(EsqlDataTypes.DATE_PERIOD, 
EsqlDataTypes.TIME_DURATION)) .sorted(Comparator.comparing(DataType::name)) .toList(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 3a6a5d8eabae3..1e2c24062b07a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -53,6 +53,18 @@ public static Iterable parameters() { equalTo(2023L) ) ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("YeAr"), DataTypes.TEXT, "chrono"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "date") + ), + "DateExtractEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", + DataTypes.LONG, + equalTo(2023L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.DATETIME), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java new file mode 100644 index 0000000000000..3fa28c566649e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class DateFormatTests extends AbstractScalarFunctionTestCase { + public DateFormatTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + List.of(DataTypes.KEYWORD, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.KEYWORD, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.TEXT, DataTypes.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy"), DataTypes.TEXT, "formatter"), + new TestCaseSupplier.TypedData(1687944333000L, DataTypes.DATETIME, "val") + ), + "DateFormatEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], 
locale=en_US]", + DataTypes.KEYWORD, + equalTo(BytesRefs.toBytesRef("2023")) + ) + ) + ) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new DateFormat(source, args.get(0), args.get(1), EsqlTestUtils.TEST_CFG); + } + + @Override + protected List argSpec() { + return List.of(required(strings()), required(DataTypes.DATETIME)); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.KEYWORD; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index 540d1aa34474b..c7a1a945e079e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -62,6 +62,18 @@ public static Iterable parameters() { equalTo(1683244800000L) ) ), + new TestCaseSupplier( + "With Both Text", + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef("yyyy-MM-dd"), DataTypes.TEXT, "second"), + new TestCaseSupplier.TypedData(new BytesRef("2023-05-05"), DataTypes.TEXT, "first") + ), + "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0], zoneId=Z]", + DataTypes.DATETIME, + equalTo(1683244800000L) + ) + ), new TestCaseSupplier( List.of(DataTypes.KEYWORD, DataTypes.KEYWORD), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java new file mode 100644 index 0000000000000..fbeb824697178 --- /dev/null +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchTests.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.ip; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class CIDRMatchTests extends AbstractScalarFunctionTestCase { + public CIDRMatchTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + + var suppliers = List.of( + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new 
TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("192.168.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(true) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.KEYWORD), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.KEYWORD, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ), + new TestCaseSupplier( + List.of(DataTypes.IP, DataTypes.TEXT), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(EsqlDataTypeConverter.stringToIP("192.168.0.10"), DataTypes.IP, "ip"), + new TestCaseSupplier.TypedData(new BytesRef("10.0.0.0/16"), DataTypes.TEXT, "cidrs") + ), + "CIDRMatchEvaluator[ip=Attribute[channel=0], cidrs=[Attribute[channel=1]]]", + DataTypes.BOOLEAN, + equalTo(false) + ) + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new CIDRMatch(source, args.get(0), List.of(args.get(1))); + } + + @Override + protected List argSpec() { + return List.of(required(DataTypes.IP), required(strings())); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.BOOLEAN; + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 44d7290cbc002..c72315312afce 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -193,3 +193,64 @@ - match: { columns.0.type: double } - length: { values: 1 } - match: { values.0.0: 30.0 } + + + +--- +"text in functions #105379": + - skip: + version: " - 8.13.99" + reason: "fixes in 8.13 or later" + - do: + indices.create: + index: idx_with_date_ip_txt + body: + mappings: + properties: + id: + type: long + date: + type: date + ip: + type: ip + text: + type: text + text2: + type: text + + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 1, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text":"yyyy-MM-dd", "text2":"year" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 2, "date": "2024-03-22T14:50:00.000Z", "ip": "192.168.0.10", "text": "192.168.0.0/16" } + - { "index": { "_index": "idx_with_date_ip_txt" } } + - { "id": 3, "date": "2024-03-22T14:50:00.000Z", "ip": "10.0.0.10", "text": "192.168.0.0/16" } + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id == 1 | eval x = date_format(text, date), y = date_extract(text2, date), p = date_parse(text, "2024-03-14") | keep x, y, p | limit 1' + - match: { columns.0.name: x } + - match: { columns.0.type: keyword } + - match: { columns.1.name: y } + - match: { columns.1.type: long } + - length: { values: 1 } + - match: { values.0.0: "2024-03-22" } + - match: { values.0.1: 2024 } + - match: { values.0.2: "2024-03-14T00:00:00.000Z" } + + - do: + esql.query: + body: + query: 'from idx_with_date_ip_txt | where id > 1 | eval x = cidr_match(ip, text) | sort id | keep id, x | limit 2' + - match: { columns.0.name: id } + - match: { columns.0.type: long } + - match: { columns.1.name: x } + - match: { columns.1.type: boolean } + - length: { values: 2 } + - match: { values.0.0: 2 } + - match: { values.0.1: true } + - match: { values.1.0: 
3 } + - match: { values.1.1: false } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index d73efe1788ce3..fba68760a162f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -366,6 +366,26 @@ setup: - match: { values.0: [ "Jenny - IT Director"] } - match: { values.1: [ "John - Payroll Specialist"] } +--- +"split text": + - skip: + version: " - 8.13.99" + reason: "functions fixed for text in v 8.14" + features: allowed_warnings_regex + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | sort emp_no | eval split = split(tag, " ") | keep split' + + - match: { columns.0.name: "split" } + - match: { columns.0.type: "keyword" } + + - length: { values: 2 } + - match: { values.0: [ ["foo", "bar"] ] } + - match: { values.1: [ "baz"] } + --- "stats text with raw": From a2af99cb0041c429390f9f8ad61b18db07716617 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Wed, 27 Mar 2024 15:07:56 +0200 Subject: [PATCH 209/214] Add metric for total downsampling latency (#106747) * Add DownsampleMetrics * replace singleton with injection * add comment * add comment * fix test * Metric for total downsampling latency * small fixes * make startTime a local variable --- .../xpack/downsample/DownsampleMetrics.java | 16 ++- .../downsample/DownsampleShardIndexer.java | 6 +- .../downsample/TransportDownsampleAction.java | 112 +++++++++++++----- .../DownsampleActionSingleNodeTests.java | 9 ++ 4 files changed, 108 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java index 576f40a8190f3..797b89ecf11a0 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -30,6 +30,7 @@ public class DownsampleMetrics extends AbstractLifecycleComponent { public static final String LATENCY_SHARD = "es.tsdb.downsample.latency.shard.histogram"; + public static final String LATENCY_TOTAL = "es.tsdb.downsample.latency.total.histogram"; private final MeterRegistry meterRegistry; @@ -41,6 +42,7 @@ public DownsampleMetrics(MeterRegistry meterRegistry) { protected void doStart() { // Register all metrics to track. meterRegistry.registerLongHistogram(LATENCY_SHARD, "Downsampling action latency per shard", "ms"); + meterRegistry.registerLongHistogram(LATENCY_TOTAL, "Downsampling latency end-to-end", "ms"); } @Override @@ -49,17 +51,17 @@ protected void doStop() {} @Override protected void doClose() throws IOException {} - enum ShardActionStatus { + enum ActionStatus { SUCCESS("success"), MISSING_DOCS("missing_docs"), FAILED("failed"); - public static final String NAME = "status"; + static final String NAME = "status"; private final String message; - ShardActionStatus(String message) { + ActionStatus(String message) { this.message = message; } @@ -68,7 +70,11 @@ String getMessage() { } } - void recordLatencyShard(long durationInMilliSeconds, ShardActionStatus status) { - meterRegistry.getLongHistogram(LATENCY_SHARD).record(durationInMilliSeconds, Map.of(ShardActionStatus.NAME, status.getMessage())); + void recordLatencyShard(long durationInMilliSeconds, ActionStatus status) { + meterRegistry.getLongHistogram(LATENCY_SHARD).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); + } + + void recordLatencyTotal(long durationInMilliSeconds, ActionStatus status) { + 
meterRegistry.getLongHistogram(LATENCY_TOTAL).record(durationInMilliSeconds, Map.of(ActionStatus.NAME, status.getMessage())); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 773dfbe897b50..59c1c9c38efae 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -191,7 +191,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumSent() + "]"; logger.info(error); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.MISSING_DOCS); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.MISSING_DOCS); throw new DownsampleShardIndexerException(error, false); } @@ -204,7 +204,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept + task.getNumFailed() + "]"; logger.info(error); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.FAILED); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.FAILED); throw new DownsampleShardIndexerException(error, false); } @@ -214,7 +214,7 @@ public DownsampleIndexerAction.ShardDownsampleResponse execute() throws IOExcept ActionListener.noop() ); logger.info("Downsampling task [" + task.getPersistentTaskId() + " on shard " + indexShard.shardId() + " completed"); - downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ShardActionStatus.SUCCESS); + downsampleMetrics.recordLatencyShard(duration.millis(), DownsampleMetrics.ActionStatus.SUCCESS); return new DownsampleIndexerAction.ShardDownsampleResponse(indexShard.shardId(), task.getNumIndexed()); } diff 
--git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 58401451fa86b..c526561999497 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -91,6 +91,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; @@ -115,6 +116,7 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc private final IndexScopedSettings indexScopedSettings; private final ThreadContext threadContext; private final PersistentTasksService persistentTasksService; + private final DownsampleMetrics downsampleMetrics; private static final Set FORBIDDEN_SETTINGS = Set.of( IndexSettings.DEFAULT_PIPELINE.getKey(), @@ -153,7 +155,8 @@ public TransportDownsampleAction( ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings, - PersistentTasksService persistentTasksService + PersistentTasksService persistentTasksService, + DownsampleMetrics downsampleMetrics ) { super( DownsampleAction.NAME, @@ -173,6 +176,21 @@ public TransportDownsampleAction( this.threadContext = threadPool.getThreadContext(); this.taskQueue = clusterService.createTaskQueue("downsample", Priority.URGENT, STATE_UPDATE_TASK_EXECUTOR); this.persistentTasksService = persistentTasksService; + this.downsampleMetrics = downsampleMetrics; + } + + private void recordLatencyOnSuccess(long startTime) { + downsampleMetrics.recordLatencyTotal( + TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), + 
DownsampleMetrics.ActionStatus.SUCCESS + ); + } + + private void recordLatencyOnFailure(long startTime) { + downsampleMetrics.recordLatencyTotal( + TimeValue.timeValueMillis(client.threadPool().relativeTimeInMillis() - startTime).getMillis(), + DownsampleMetrics.ActionStatus.FAILED + ); } @Override @@ -182,6 +200,7 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { + long startTime = client.threadPool().relativeTimeInMillis(); String sourceIndexName = request.getSourceIndex(); final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); @@ -236,7 +255,7 @@ protected void masterOperation( final TaskId parentTask = new TaskId(clusterService.localNode().getId(), task.getId()); // Short circuit if target index has been downsampled: final String downsampleIndexName = request.getTargetIndex(); - if (canShortCircuit(downsampleIndexName, parentTask, request.getWaitTimeout(), state.metadata(), listener)) { + if (canShortCircuit(downsampleIndexName, parentTask, request.getWaitTimeout(), startTime, state.metadata(), listener)) { logger.info("Skipping downsampling, because a previous execution already completed downsampling"); return; } @@ -325,6 +344,7 @@ protected void masterOperation( sourceIndexMetadata, downsampleIndexName, parentTask, + startTime, metricFields, labelFields, dimensionFields @@ -335,7 +355,14 @@ protected void masterOperation( }, e -> { if (e instanceof ResourceAlreadyExistsException) { var metadata = clusterService.state().metadata(); - if (canShortCircuit(request.getTargetIndex(), parentTask, request.getWaitTimeout(), metadata, listener)) { + if (canShortCircuit( + request.getTargetIndex(), + parentTask, + request.getWaitTimeout(), + startTime, + metadata, + listener + )) { logger.info("Downsample tasks are not created, because a previous execution already completed downsampling"); return; } @@ -345,6 +372,7 @@ protected void masterOperation( 
sourceIndexMetadata, downsampleIndexName, parentTask, + startTime, metricFields, labelFields, dimensionFields @@ -364,6 +392,7 @@ private boolean canShortCircuit( String targetIndexName, TaskId parentTask, TimeValue waitTimeout, + long startTime, Metadata metadata, ActionListener listener ) { @@ -391,7 +420,13 @@ private boolean canShortCircuit( .indices() .refresh( refreshRequest, - new RefreshDownsampleIndexActionListener(listener, parentTask, targetIndexMetadata.getIndex().getName(), waitTimeout) + new RefreshDownsampleIndexActionListener( + listener, + parentTask, + targetIndexMetadata.getIndex().getName(), + waitTimeout, + startTime + ) ); return true; } @@ -405,6 +440,7 @@ private void performShardDownsampling( IndexMetadata sourceIndexMetadata, String downsampleIndexName, TaskId parentTask, + long startTime, List metricFields, List labelFields, List dimensionFields @@ -414,6 +450,7 @@ private void performShardDownsampling( // NOTE: before we set the number of replicas to 0, as a result here we are // only dealing with primary shards. final AtomicInteger countDown = new AtomicInteger(numberOfShards); + final AtomicBoolean errorReported = new AtomicBoolean(false); for (int shardNum = 0; shardNum < numberOfShards; shardNum++) { final ShardId shardId = new ShardId(sourceIndex, shardNum); final String persistentTaskId = createPersistentTaskId( @@ -458,13 +495,16 @@ public void onResponse(PersistentTasksCustomMetadata.PersistentTask listener, final IndexMetadata sourceIndexMetadata, final String downsampleIndexName, - final TaskId parentTask + final TaskId parentTask, + final long startTime ) { // 4. 
Make downsample index read-only and set the correct number of replicas final Settings.Builder settings = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true); @@ -527,7 +568,13 @@ private void updateTargetIndexSettingStep( .indices() .updateSettings( updateSettingsReq, - new UpdateDownsampleIndexSettingsActionListener(listener, parentTask, downsampleIndexName, request.getWaitTimeout()) + new UpdateDownsampleIndexSettingsActionListener( + listener, + parentTask, + downsampleIndexName, + request.getWaitTimeout(), + startTime + ) ); } @@ -871,17 +918,20 @@ class UpdateDownsampleIndexSettingsActionListener implements ActionListener listener, final TaskId parentTask, final String downsampleIndexName, - final TimeValue timeout + final TimeValue timeout, + final long startTime ) { this.listener = listener; this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; this.timeout = timeout; + this.startTime = startTime; } @Override @@ -890,11 +940,12 @@ public void onResponse(final AcknowledgedResponse response) { request.setParentTask(parentTask); client.admin() .indices() - .refresh(request, new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, timeout)); + .refresh(request, new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, timeout, startTime)); } @Override public void onFailure(Exception e) { + recordLatencyOnSuccess(startTime); // Downsampling has already completed in all shards. 
listener.onFailure(e); } @@ -909,17 +960,20 @@ class RefreshDownsampleIndexActionListener implements ActionListener actionListener, TaskId parentTask, final String downsampleIndexName, - final TimeValue timeout + final TimeValue timeout, + final long startTime ) { this.actionListener = actionListener; this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; this.timeout = timeout; + this.startTime = startTime; } @Override @@ -930,7 +984,9 @@ public void onResponse(final BroadcastResponse response) { // Mark downsample index as "completed successfully" ("index.downsample.status": "success") taskQueue.submitTask( "update-downsample-metadata [" + downsampleIndexName + "]", - new DownsampleClusterStateUpdateTask(new ForceMergeActionListener(parentTask, downsampleIndexName, actionListener)) { + new DownsampleClusterStateUpdateTask( + new ForceMergeActionListener(parentTask, downsampleIndexName, startTime, actionListener) + ) { @Override public ClusterState execute(ClusterState currentState) { @@ -957,6 +1013,7 @@ public ClusterState execute(ClusterState currentState) { @Override public void onFailure(Exception e) { + recordLatencyOnSuccess(startTime); // Downsampling has already completed in all shards. actionListener.onFailure(e); } @@ -970,42 +1027,43 @@ class ForceMergeActionListener implements ActionListener { final ActionListener actionListener; private final TaskId parentTask; private final String downsampleIndexName; + private final long startTime; ForceMergeActionListener( final TaskId parentTask, final String downsampleIndexName, + final long startTime, final ActionListener onFailure ) { this.parentTask = parentTask; this.downsampleIndexName = downsampleIndexName; + this.startTime = startTime; this.actionListener = onFailure; } @Override public void onResponse(final AcknowledgedResponse response) { - /* - * At this point downsample index has been created - * successfully even force merge fails. 
- * So, we should not fail the downsmaple operation - */ ForceMergeRequest request = new ForceMergeRequest(downsampleIndexName); request.maxNumSegments(1); request.setParentTask(parentTask); - client.admin() - .indices() - .forceMerge(request, ActionListener.wrap(mergeIndexResp -> actionListener.onResponse(AcknowledgedResponse.TRUE), t -> { - /* - * At this point downsample index has been created - * successfully even if force merge failed. - * So, we should not fail the downsample operation. - */ - logger.error("Failed to force-merge downsample index [" + downsampleIndexName + "]", t); - actionListener.onResponse(AcknowledgedResponse.TRUE); - })); + client.admin().indices().forceMerge(request, ActionListener.wrap(mergeIndexResp -> { + actionListener.onResponse(AcknowledgedResponse.TRUE); + recordLatencyOnSuccess(startTime); + }, t -> { + /* + * At this point downsample index has been created + * successfully even if force merge failed. + * So, we should not fail the downsample operation. 
+ */ + logger.error("Failed to force-merge downsample index [" + downsampleIndexName + "]", t); + actionListener.onResponse(AcknowledgedResponse.TRUE); + recordLatencyOnSuccess(startTime); + })); } @Override public void onFailure(Exception e) { + recordLatencyOnSuccess(startTime); this.actionListener.onFailure(e); } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 4c5fdc23e04f9..5e0e6be61b9fc 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -824,6 +824,7 @@ public void testDownsampleStats() throws IOException { final TestTelemetryPlugin plugin = getInstanceFromNode(PluginsService.class).filterPlugins(TestTelemetryPlugin.class) .findFirst() .orElseThrow(); + List measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_SHARD); assertFalse(measurements.isEmpty()); for (Measurement measurement : measurements) { @@ -831,6 +832,14 @@ public void testDownsampleStats() throws IOException { assertEquals(1, measurement.attributes().size()); assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed", "missing_docs"))); } + + measurements = plugin.getLongHistogramMeasurement(DownsampleMetrics.LATENCY_TOTAL); + assertFalse(measurements.isEmpty()); + for (Measurement measurement : measurements) { + assertTrue(measurement.value().toString(), measurement.value().longValue() >= 0 && measurement.value().longValue() < 1000_000); + assertEquals(1, measurement.attributes().size()); + assertThat(measurement.attributes().get("status"), Matchers.in(List.of("success", "failed"))); + } } public void testResumeDownsample() throws IOException { From 
86b24ab19a21da1cad1fe7f188527783cf12eca7 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 27 Mar 2024 14:18:33 +0100 Subject: [PATCH 210/214] Add test to exercise reduction of terms aggregation order by key and fix pruning bug (#106799) We are not computing the otherDocCounts properly as we are exiting the iteration too early so we are not counting the pruned buckets. This commit make sure we are counting all buckets. --- docs/changelog/106799.yaml | 5 ++ .../bucket/terms/StringTermsIT.java | 89 +++++++++++++++++++ .../bucket/terms/AbstractInternalTerms.java | 32 +++---- 3 files changed, 106 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/106799.yaml diff --git a/docs/changelog/106799.yaml b/docs/changelog/106799.yaml new file mode 100644 index 0000000000000..c75cd5c15e44b --- /dev/null +++ b/docs/changelog/106799.yaml @@ -0,0 +1,5 @@ +pr: 106799 +summary: Add test to exercise reduction of terms aggregation order by key +area: Aggregations +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 8a2071584b4a0..1b2d66fc12c76 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -49,6 +49,7 @@ import java.util.Map; import java.util.Set; import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; @@ -1287,4 +1288,92 @@ public void testScriptWithValueType() throws Exception { assertThat(ex.getCause().getMessage(), containsString("Unknown value type [foobar]")); } } + + public void testOrderByKey() throws Exception { + 
Map data = new HashMap<>(); + for (int i = 0; i < 5; i++) { + assertAcked( + indicesAdmin().prepareCreate("idx" + i).setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", "filter", "type=boolean") + ); + List builders = new ArrayList<>(); + for (int j = 0; j < 100; j++) { + String val = "val" + random().nextInt(1000); + boolean filter = randomBoolean(); + long[] counter = data.computeIfAbsent(val, s -> new long[] { 0 }); + if (filter == false) { + counter[0]++; + } + builders.add( + prepareIndex("idx" + i).setSource( + jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, val).field("filter", filter).endObject() + ) + ); + } + indexRandom(true, builders); + } + List allKeys = new ArrayList<>(data.keySet()); + List keysMinDocCount1 = allKeys.stream().filter(key -> data.get(key)[0] > 0).collect(Collectors.toList()); + List keysMinDocCount2 = allKeys.stream().filter(key -> data.get(key)[0] > 1).collect(Collectors.toList()); + // test for different batch sizes to exercise partial reduces + for (int batchReduceSize = 2; batchReduceSize < 6; batchReduceSize++) { + // with min_doc_count = 0 + allKeys.sort(String::compareTo); + assertOrderByKeyResponse(allKeys, data, true, 0, batchReduceSize); + Collections.reverse(allKeys); + assertOrderByKeyResponse(allKeys, data, false, 0, batchReduceSize); + // with min_doc_count = 1 + keysMinDocCount1.sort(String::compareTo); + assertOrderByKeyResponse(keysMinDocCount1, data, true, 1, batchReduceSize); + Collections.reverse(keysMinDocCount1); + assertOrderByKeyResponse(keysMinDocCount1, data, false, 1, batchReduceSize); + // with min_doc_count = 2 + keysMinDocCount2.sort(String::compareTo); + assertOrderByKeyResponse(keysMinDocCount2, data, true, 2, batchReduceSize); + Collections.reverse(keysMinDocCount2); + assertOrderByKeyResponse(keysMinDocCount2, data, false, 2, batchReduceSize); + } + for (int i = 0; i < 5; i++) { + assertAcked(indicesAdmin().prepareDelete("idx" + i)); + } + } + + private void assertOrderByKeyResponse( + 
List keys, + Map counts, + boolean asc, + int minDocCount, + int batchReduceSize + ) { + int size = randomIntBetween(1, keys.size()); + long sumOtherCount = 0; + for (int i = size; i < keys.size(); i++) { + sumOtherCount += counts.get(keys.get(i))[0]; + } + final long finalSumOtherCount = sumOtherCount; + assertNoFailuresAndResponse( + prepareSearch("idx0", "idx1", "idx2", "idx3", "idx4").setBatchedReduceSize(batchReduceSize) + .setQuery(QueryBuilders.termQuery("filter", false)) + .addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .size(size) + .shardSize(500) + .minDocCount(minDocCount) + .order(BucketOrder.key(asc)) + ), + response -> { + StringTerms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(size)); + assertThat(terms.getSumOfOtherDocCounts(), equalTo(finalSumOtherCount)); + + for (int i = 0; i < size; i++) { + StringTerms.Bucket bucket = terms.getBuckets().get(i); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo(keys.get(i))); + assertThat(bucket.getDocCount(), equalTo(counts.get(keys.get(i))[0])); + } + } + ); + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index af966963e43fc..98f5741ad7440 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -30,7 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.function.Function; +import java.util.function.Consumer; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyAsc; import static 
org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; @@ -153,7 +153,7 @@ private long getDocCountError(A terms) { private BucketOrder reduceBuckets( List aggregations, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { /* * Buckets returned by a partial reduce or a shard response are sorted by key since {@link Version#V_7_10_0}. @@ -176,7 +176,7 @@ private void reduceMergeSort( List aggregations, BucketOrder thisReduceOrder, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { assert isKeyOrder(thisReduceOrder); final Comparator cmp = thisReduceOrder.comparator(); @@ -201,12 +201,7 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { assert lastBucket == null || cmp.compare(top.current(), lastBucket) >= 0; if (lastBucket != null && cmp.compare(top.current(), lastBucket) != 0) { // the key changed so bundle up the last key's worth of buckets - boolean shouldContinue = sink.apply( - new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets) - ); - if (false == shouldContinue) { - return; - } + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); sameTermBuckets = new ArrayList<>(); } lastBucket = top.current(); @@ -226,14 +221,14 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } if (sameTermBuckets.isEmpty() == false) { - sink.apply(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); } } private void reduceLegacy( List aggregations, AggregationReduceContext reduceContext, - Function, Boolean> sink + Consumer> sink ) { Map> bucketMap = new HashMap<>(); for (InternalAggregation aggregation : aggregations) { @@ -246,12 +241,7 @@ private void reduceLegacy( } } for (List sameTermBuckets : 
bucketMap.values()) { - boolean shouldContinue = sink.apply( - new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets) - ); - if (false == shouldContinue) { - return; - } + sink.accept(new DelayedBucket<>(AbstractInternalTerms.this::reduceBucket, reduceContext, sameTermBuckets)); } } @@ -304,7 +294,6 @@ public InternalAggregation doReduce(List aggregations, Aggr if (bucket.getDocCount() >= getMinDocCount()) { top.add(bucket); } - return true; }); result = top.build(); } else { @@ -316,8 +305,11 @@ public InternalAggregation doReduce(List aggregations, Aggr boolean canPrune = isKeyOrder(getOrder()) && getMinDocCount() == 0; result = new ArrayList<>(); thisReduceOrder = reduceBuckets(aggregations, reduceContext, bucket -> { - result.add(bucket.reduced()); - return false == canPrune || result.size() < getRequiredSize(); + if (canPrune == false || result.size() < getRequiredSize()) { + result.add(bucket.reduced()); + } else { + otherDocCount[0] += bucket.getDocCount(); + } }); } for (B r : result) { From 6325f2cc32643b4426bc9463885f868a62bdb5de Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 27 Mar 2024 14:46:33 +0100 Subject: [PATCH 211/214] Move KeyComparable interface to AbstractInternalTerms.AbstractTermsBucket (#106806) --- .../bucket/terms/AbstractInternalTerms.java | 11 +++++++---- .../aggregations/bucket/terms/InternalTerms.java | 3 +-- .../analytics/multiterms/InternalMultiTerms.java | 3 +-- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 98f5741ad7440..c423b2ca8cb51 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -18,6 +18,7 @@ 
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.InternalOrder; +import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.TopBucketBuilder; import org.elasticsearch.search.aggregations.bucket.IteratorAndCurrent; import org.elasticsearch.search.aggregations.support.SamplingContext; @@ -40,7 +41,7 @@ /** * Base class for terms and multi_terms aggregation that handles common reduce logic */ -public abstract class AbstractInternalTerms, B extends AbstractInternalTerms.AbstractTermsBucket> +public abstract class AbstractInternalTerms, B extends AbstractInternalTerms.AbstractTermsBucket> extends InternalMultiBucketAggregation { public AbstractInternalTerms(String name, Map metadata) { @@ -52,7 +53,9 @@ protected AbstractInternalTerms(StreamInput in) throws IOException { super(in); } - public abstract static class AbstractTermsBucket extends InternalMultiBucketAggregation.InternalBucket { + public abstract static class AbstractTermsBucket> extends InternalMultiBucketAggregation.InternalBucket + implements + KeyComparable { protected abstract void updateDocCountError(long docCountErrorDiff); @@ -353,12 +356,12 @@ protected static XContentBuilder doXContentCommon( Params params, Long docCountError, long otherDocCount, - List buckets + List> buckets ) throws IOException { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError); builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); - for (AbstractTermsBucket bucket : buckets) { + for (AbstractTermsBucket bucket : buckets) { bucket.toXContent(builder, params); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 66a3ddb2c94c4..e6e8698e8b568 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -13,7 +13,6 @@ import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalOrder; -import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -29,7 +28,7 @@ public abstract class InternalTerms, B extends Int public static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound"); public static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); - public abstract static class Bucket> extends AbstractTermsBucket implements Terms.Bucket, KeyComparable { + public abstract static class Bucket> extends AbstractTermsBucket implements Terms.Bucket { /** * Reads a bucket. Should be a constructor reference. 
*/ diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 39d7808571870..3e321d57d877c 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -19,7 +19,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalOrder; -import org.elasticsearch.search.aggregations.KeyComparable; import org.elasticsearch.search.aggregations.bucket.terms.AbstractInternalTerms; import org.elasticsearch.xcontent.XContentBuilder; @@ -37,7 +36,7 @@ public class InternalMultiTerms extends AbstractInternalTerms { + public static class Bucket extends AbstractInternalTerms.AbstractTermsBucket { long bucketOrd; From c276b45fae27ab0983a5fce4c8f32fbc19c9f8d9 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Wed, 27 Mar 2024 10:01:53 -0400 Subject: [PATCH 212/214] [Transform] Consolidate permissions checks (#106413) * [Transform] Consolidate permissions checks When we defer permissions checks, unattended Transforms will start and fail immediately with errors related to the internal transform index. The transform will progress beyond the 0th checkpoint, but the search repeatedly fails for missing permissions. Rather than searching and failing, we will reuse the initial permissions check error, which includes the correct permission to set to get the Transform working. The check will happen before the initial search, so it will not progress beyond the 0th checkpoint. 
Fix #105794 --------- Co-authored-by: Elastic Machine --- docs/changelog/106413.yaml | 6 ++++ .../core/transform/TransformMessages.java | 3 ++ .../TransformInsufficientPermissionsIT.java | 23 +++++++++---- .../transforms/TransformIndexer.java | 34 +++++++++++-------- 4 files changed, 45 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/106413.yaml diff --git a/docs/changelog/106413.yaml b/docs/changelog/106413.yaml new file mode 100644 index 0000000000000..8e13a839bc41e --- /dev/null +++ b/docs/changelog/106413.yaml @@ -0,0 +1,6 @@ +pr: 106413 +summary: Consolidate permissions checks +area: Transform +type: bug +issues: + - 105794 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java index 635ce03879089..6f1fdb8a20cae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java @@ -51,6 +51,9 @@ public class TransformMessages { "Failed to parse transform statistics for transform [{0}]"; public static final String FAILED_TO_LOAD_TRANSFORM_CHECKPOINT = "Failed to load transform checkpoint for transform [{0}]"; public static final String FAILED_TO_LOAD_TRANSFORM_STATE = "Failed to load transform state for transform [{0}]"; + + public static final String TRANSFORM_CANNOT_START_WITHOUT_PERMISSIONS = "Cannot start transform [{0}] because user lacks required " + + "permissions, see privileges_check_failed issue for more details"; public static final String TRANSFORM_CONFIGURATION_BAD_FUNCTION_COUNT = "Transform configuration must specify exactly 1 function"; public static final String TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY = "Pivot transform configuration must specify at least 1 group_by"; public static final String TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION 
= diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java index 105633c7340e5..d3d86571e002f 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformInsufficientPermissionsIT.java @@ -420,12 +420,15 @@ public void testTransformPermissionsDeferUnattendedNoDest() throws Exception { startTransform(transformId, RequestOptions.DEFAULT); - String destIndexIssue = Strings.format("Could not create destination index [%s] for transform [%s]", destIndexName, transformId); + var permissionIssues = Strings.format( + "org.elasticsearch.ElasticsearchSecurityException: Cannot start transform [%s] because user lacks required permissions, " + + "see privileges_check_failed issue for more details", + transformId + ); // transform's auth state status is still RED due to: // - lacking permissions - // - and the inability to create destination index in the indexer (which is also a consequence of lacking permissions) - // wait for 10 seconds to give the transform indexer enough time to try creating destination index - assertBusy(() -> { assertRed(transformId, authIssue, destIndexIssue); }); + // - and the inability to start the indexer (which is also a consequence of lacking permissions) + assertBusy(() -> { assertRed(transformId, authIssue, permissionIssues); }); // update transform's credentials so that the transform has permission to access source/dest indices updateConfig(transformId, "{}", RequestOptions.DEFAULT.toBuilder().addHeader(AUTH_KEY, Users.SENIOR.header).build()); @@ -440,7 +443,6 @@ 
public void testTransformPermissionsDeferUnattendedNoDest() throws Exception { * unattended = true * pre-existing dest index = true */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105794") public void testTransformPermissionsDeferUnattendedDest() throws Exception { String transformId = "transform-permissions-defer-unattended-dest-exists"; String sourceIndexName = transformId + "-index"; @@ -467,8 +469,15 @@ public void testTransformPermissionsDeferUnattendedDest() throws Exception { startTransform(config.getId(), RequestOptions.DEFAULT); - // transform's auth state status is still RED, but the health status is GREEN (because dest index exists) - assertRed(transformId, authIssue); + var permissionIssues = Strings.format( + "org.elasticsearch.ElasticsearchSecurityException: Cannot start transform [%s] because user lacks required permissions, " + + "see privileges_check_failed issue for more details", + transformId + ); + // transform's auth state status is still RED due to: + // - lacking permissions + // - and the inability to start the indexer (which is also a consequence of lacking permissions) + assertBusy(() -> { assertRed(transformId, authIssue, permissionIssues); }); // update transform's credentials so that the transform has permission to access source/dest indices updateConfig(transformId, "{}", RequestOptions.DEFAULT.toBuilder().addHeader(AUTH_KEY, Users.SENIOR.header).build()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 38bd231e3e76a..636ed3cc02706 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import 
org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; @@ -21,6 +22,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.health.HealthStatus; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -271,22 +273,25 @@ protected void onStart(long now, ActionListener listener) { return; } - SetOnce> deducedDestIndexMappings = new SetOnce<>(); - - ActionListener finalListener = ActionListener.wrap(r -> { - try { - // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory - if (context.getPageSize() == 0) { - configurePageSize(getConfig().getSettings().getMaxPageSearchSize()); - } + if (context.getAuthState() != null && HealthStatus.RED.equals(context.getAuthState().getStatus())) { + // AuthorizationState status is RED which means there was permission check error during PUT or _update. 
+ listener.onFailure( + new ElasticsearchSecurityException( + TransformMessages.getMessage(TransformMessages.TRANSFORM_CANNOT_START_WITHOUT_PERMISSIONS, getConfig().getId()) + ) + ); + return; + } - runState = determineRunStateAtStart(); - listener.onResponse(true); - } catch (Exception e) { - listener.onFailure(e); - return; + ActionListener finalListener = listener.delegateFailureAndWrap((l, r) -> { + // if we haven't set the page size yet, if it is set we might have reduced it after running into an out of memory + if (context.getPageSize() == 0) { + configurePageSize(getConfig().getSettings().getMaxPageSearchSize()); } - }, listener::onFailure); + + runState = determineRunStateAtStart(); + l.onResponse(true); + }); // On each run, we need to get the total number of docs and reset the count of processed docs // Since multiple checkpoints can be executed in the task while it is running on the same node, we need to gather @@ -334,6 +339,7 @@ protected void onStart(long now, ActionListener listener) { } }, listener::onFailure); + var deducedDestIndexMappings = new SetOnce>(); var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); From 8f28a7a47a17c503f0d8afdc8d74fc0160731da2 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Wed, 27 Mar 2024 10:23:18 -0400 Subject: [PATCH 213/214] [ML] Allow users to specify similarity field (#106493) * Allow users to specify similarity * Adding l2_norm and e5 fields * Bumping minimum versions for services * Cleaning up * Fixing merge issue --------- Co-authored-by: Elastic Machine --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/SimilarityMeasure.java | 26 ++- .../action/cohere/CohereEmbeddingsAction.java | 2 +- ...ereEmbeddingsExecutableRequestCreator.java | 2 +- .../cohere/CohereEmbeddingsRequest.java | 2 +- .../inference/services/ServiceUtils.java | 19 +- 
.../services/cohere/CohereService.java | 13 +- .../cohere/CohereServiceSettings.java | 18 +- .../CohereEmbeddingsServiceSettings.java | 11 ++ .../ElasticsearchInternalService.java | 2 +- ...lingualE5SmallInternalServiceSettings.java | 40 ++-- .../huggingface/HuggingFaceService.java | 4 +- .../HuggingFaceServiceSettings.java | 4 +- .../services/openai/OpenAiService.java | 7 +- .../OpenAiEmbeddingsServiceSettings.java | 5 +- .../cohere/CohereEmbeddingsRequestTests.java | 2 +- .../cohere/CohereServiceSettingsTests.java | 7 +- .../services/cohere/CohereServiceTests.java | 178 +++++++++++++++--- .../CohereEmbeddingsModelTests.java | 23 +++ .../ElasticsearchInternalServiceTests.java | 21 ++- .../HuggingFaceServiceSettingsTests.java | 8 +- .../huggingface/HuggingFaceServiceTests.java | 54 ++++++ .../HuggingFaceEmbeddingsModelTests.java | 18 ++ .../services/openai/OpenAiServiceTests.java | 114 ++++++++++- .../OpenAiEmbeddingsServiceSettingsTests.java | 8 +- 25 files changed, 504 insertions(+), 85 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c0970c9a6caa0..64e27bc7ea562 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -153,6 +153,7 @@ static TransportVersion def(int id) { public static final TransportVersion USE_DATA_STREAM_GLOBAL_RETENTION = def(8_613_00_0); public static final TransportVersion ML_COMPLETION_INFERENCE_SERVICE_ADDED = def(8_614_00_0); public static final TransportVersion ML_INFERENCE_EMBEDDING_BYTE_ADDED = def(8_615_00_0); + public static final TransportVersion ML_INFERENCE_L2_NORM_SIMILARITY_ADDED = def(8_616_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java b/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java index cd81cc461bd1d..ff9fedee02fac 100644 --- a/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java +++ b/server/src/main/java/org/elasticsearch/inference/SimilarityMeasure.java @@ -8,11 +8,18 @@ package org.elasticsearch.inference; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; + +import java.util.EnumSet; import java.util.Locale; public enum SimilarityMeasure { COSINE, - DOT_PRODUCT; + DOT_PRODUCT, + L2_NORM; + + private static final EnumSet BEFORE_L2_NORM_ENUMS = EnumSet.range(COSINE, DOT_PRODUCT); @Override public String toString() { @@ -22,4 +29,21 @@ public String toString() { public static SimilarityMeasure fromString(String name) { return valueOf(name.trim().toUpperCase(Locale.ROOT)); } + + /** + * Returns a similarity measure that is known based on the transport version provided. If the similarity enum was not yet + * introduced it will be defaulted to null. 
+ * + * @param similarityMeasure the value to translate if necessary + * @param version the version that dictates the translation + * @return the similarity that is known to the version passed in + */ + public static SimilarityMeasure translateSimilarity(SimilarityMeasure similarityMeasure, TransportVersion version) { + if (version.before(TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED) + && BEFORE_L2_NORM_ENUMS.contains(similarityMeasure) == false) { + return null; + } + + return similarityMeasure; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java index 1f50f0ae6bc57..a49fc85200894 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/cohere/CohereEmbeddingsAction.java @@ -31,7 +31,7 @@ public CohereEmbeddingsAction(Sender sender, CohereEmbeddingsModel model) { Objects.requireNonNull(model); this.sender = Objects.requireNonNull(sender); this.failedToSendRequestErrorMessage = constructFailedToSendRequestMessage( - model.getServiceSettings().getCommonSettings().getUri(), + model.getServiceSettings().getCommonSettings().uri(), "Cohere embeddings" ); requestCreator = new CohereEmbeddingsExecutableRequestCreator(model); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java index b0fdc800a64da..ff4f9847da8a1 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsExecutableRequestCreator.java @@ -37,7 +37,7 @@ private static ResponseHandler createEmbeddingsHandler() { public CohereEmbeddingsExecutableRequestCreator(CohereEmbeddingsModel model) { this.model = Objects.requireNonNull(model); - account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().getUri(), this.model.getSecretSettings().apiKey()); + account = new CohereAccount(this.model.getServiceSettings().getCommonSettings().uri(), this.model.getSecretSettings().apiKey()); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java index d788a4667c532..45f25a4dd35f5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -46,7 +46,7 @@ public CohereEmbeddingsRequest(CohereAccount account, List input, Cohere this.input = Objects.requireNonNull(input); uri = buildUri(this.account.url(), "Cohere", CohereEmbeddingsRequest::buildDefaultUri); taskSettings = embeddingsModel.getTaskSettings(); - model = embeddingsModel.getServiceSettings().getCommonSettings().getModelId(); + model = embeddingsModel.getServiceSettings().getCommonSettings().modelId(); embeddingType = embeddingsModel.getServiceSettings().getEmbeddingType(); inferenceEntityId = embeddingsModel.getInferenceEntityId(); } diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index cfbb07cb940e7..96846f3f71142 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -182,17 +182,14 @@ public static SecureString extractRequiredSecureString( } public static SimilarityMeasure extractSimilarity(Map map, String scope, ValidationException validationException) { - String similarity = extractOptionalString(map, SIMILARITY, scope, validationException); - - if (similarity != null) { - try { - return SimilarityMeasure.fromString(similarity); - } catch (IllegalArgumentException iae) { - validationException.addValidationError("[" + scope + "] Unknown similarity measure [" + similarity + "]"); - } - } - - return null; + return extractOptionalEnum( + map, + SIMILARITY, + scope, + SimilarityMeasure::fromString, + EnumSet.allOf(SimilarityMeasure.class), + validationException + ); } public static String extractRequiredString( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 4ac6ec7220de4..e25dabd7e5520 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -216,13 +216,16 @@ public void checkModelConfig(Model model, ActionListener listener) { } private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsModel model, int embeddingSize) { + var similarityFromModel = model.getServiceSettings().similarity(); + var 
similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + CohereEmbeddingsServiceSettings serviceSettings = new CohereEmbeddingsServiceSettings( new CohereServiceSettings( - model.getServiceSettings().getCommonSettings().getUri(), - SimilarityMeasure.DOT_PRODUCT, + model.getServiceSettings().getCommonSettings().uri(), + similarityToUse, embeddingSize, - model.getServiceSettings().getCommonSettings().getMaxInputTokens(), - model.getServiceSettings().getCommonSettings().getModelId() + model.getServiceSettings().getCommonSettings().maxInputTokens(), + model.getServiceSettings().getCommonSettings().modelId() ), model.getServiceSettings().getEmbeddingType() ); @@ -232,6 +235,6 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_EMBEDDING_BYTE_ADDED; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 8ea271cdb64a5..7fc9ce08e9857 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -65,10 +65,10 @@ public static CohereServiceSettings fromMap(Map map, Configurati throw validationException; } - return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, getModelId(oldModelId, modelId)); + return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, modelId(oldModelId, modelId)); } - private static String getModelId(@Nullable String model, @Nullable String modelId) { + private static String modelId(@Nullable 
String model, @Nullable String modelId) { return modelId != null ? modelId : model; } @@ -110,23 +110,25 @@ public CohereServiceSettings(StreamInput in) throws IOException { modelId = in.readOptionalString(); } - public URI getUri() { + public URI uri() { return uri; } - public SimilarityMeasure getSimilarity() { + @Override + public SimilarityMeasure similarity() { return similarity; } - public Integer getDimensions() { + @Override + public Integer dimensions() { return dimensions; } - public Integer getMaxInputTokens() { + public Integer maxInputTokens() { return maxInputTokens; } - public String getModelId() { + public String modelId() { return modelId; } @@ -179,7 +181,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? uri.toString() : null; out.writeOptionalString(uriToWrite); - out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); out.writeOptionalString(modelId); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 22f652e73526f..ec639c3fac61c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; +import 
org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; @@ -96,6 +97,16 @@ public CohereServiceSettings getCommonSettings() { return commonSettings; } + @Override + public SimilarityMeasure similarity() { + return commonSettings.similarity(); + } + + @Override + public Integer dimensions() { + return commonSettings.dimensions(); + } + public CohereEmbeddingType getEmbeddingType() { return embeddingType; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 02090ee84e708..5e51746253ebe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -406,7 +406,7 @@ public boolean isInClusterService() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 4445c5674277f..3347917bab2b5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -11,8 +11,9 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; -import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; @@ -26,6 +27,9 @@ public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInt public static final String NAME = "multilingual_e5_small_service_settings"; + static final int DIMENSIONS = 384; + static final SimilarityMeasure SIMILARITY = SimilarityMeasure.COSINE; + public MultilingualE5SmallInternalServiceSettings(int numAllocations, int numThreads, String modelId) { super(numAllocations, numThreads, modelId); } @@ -45,6 +49,16 @@ public MultilingualE5SmallInternalServiceSettings(StreamInput in) throws IOExcep */ public static MultilingualE5SmallInternalServiceSettings.Builder fromMap(Map map) { ValidationException validationException = new ValidationException(); + var requestFields = extractRequestFields(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return createBuilder(requestFields); + } + + private static RequestFields extractRequestFields(Map map, ValidationException validationException) { Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); @@ -62,26 +76,23 @@ public static MultilingualE5SmallInternalServiceSettings.Builder fromMap(Map listener) { private 
static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(HuggingFaceEmbeddingsModel model, int embeddingSize) { var serviceSettings = new HuggingFaceServiceSettings( model.getServiceSettings().uri(), - null, // Similarity measure is unknown + model.getServiceSettings().similarity(), // we don't know the similarity but use whatever the user specified embeddingSize, model.getTokenLimit() ); @@ -76,6 +76,6 @@ public String name() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_12_0; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index b9c412fc425c6..b151e9c800a74 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -135,7 +135,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { out.writeString(uri.toString()); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); } @@ -145,10 +145,12 @@ public URI uri() { return uri; } + @Override public SimilarityMeasure similarity() { return similarity; } + @Override public Integer dimensions() { return dimensions; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 83e5eef45fda4..6ce9b73f6b87c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -248,11 +248,14 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo ); } + var similarityFromModel = model.getServiceSettings().similarity(); + var similarityToUse = similarityFromModel == null ? SimilarityMeasure.DOT_PRODUCT : similarityFromModel; + OpenAiEmbeddingsServiceSettings serviceSettings = new OpenAiEmbeddingsServiceSettings( model.getServiceSettings().modelId(), model.getServiceSettings().uri(), model.getServiceSettings().organizationId(), - SimilarityMeasure.DOT_PRODUCT, + similarityToUse, embeddingSize, model.getServiceSettings().maxInputTokens(), model.getServiceSettings().dimensionsSetByUser() @@ -263,7 +266,7 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_COMPLETION_INFERENCE_SERVICE_ADDED; + return TransportVersions.ML_INFERENCE_L2_NORM_SIMILARITY_ADDED; } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index c821039e3fc76..1e5c93ea9ae22 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -192,10 +192,12 @@ public String organizationId() { 
return organizationId; } + @Override public SimilarityMeasure similarity() { return similarity; } + @Override public Integer dimensions() { return dimensions; } @@ -277,8 +279,9 @@ public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? uri.toString() : null; out.writeOptionalString(uriToWrite); out.writeOptionalString(organizationId); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { - out.writeOptionalEnum(similarity); + out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion())); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java index d106274280ed5..32911eeb44adf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestTests.java @@ -178,7 +178,7 @@ public void testCreateRequest_TruncateNone() throws IOException { } public static CohereEmbeddingsRequest createRequest(List input, CohereEmbeddingsModel model) { - var account = new CohereAccount(model.getServiceSettings().getCommonSettings().getUri(), model.getSecretSettings().apiKey()); + var account = new CohereAccount(model.getServiceSettings().getCommonSettings().uri(), model.getSecretSettings().apiKey()); return new CohereEmbeddingsRequest(account, input, model); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index b5ea720490b5a..7805c6706dc61 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -151,7 +151,7 @@ public void testFromMap_PrefersModelId_OverModel() { public void testFromMap_MissingUrl_DoesNotThrowException() { var serviceSettings = CohereServiceSettings.fromMap(new HashMap<>(Map.of()), ConfigurationParseContext.PERSISTENT); - assertNull(serviceSettings.getUri()); + assertNull(serviceSettings.uri()); } public void testFromMap_EmptyUrl_ThrowsError() { @@ -196,7 +196,10 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { MatcherAssert.assertThat( thrownException.getMessage(), - is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];") + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. 
[similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index fa21cd9bf7841..3a5527466c126 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; @@ -99,8 +100,8 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), @@ -131,8 +132,8 @@ public void testParseRequestConfig_OptionalTaskSettings() throws IOException { MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = 
(CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), equalTo(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); @@ -257,7 +258,7 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() thr MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); }, (e) -> fail("Model parsing should have succeeded " + e.getMessage())); @@ -295,8 +296,8 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + 
MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -345,7 +346,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWit MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -370,8 +371,8 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.BYTE)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), @@ -402,7 +403,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists 
MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -427,8 +428,8 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -451,7 +452,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), 
is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -478,8 +479,8 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTa MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -497,8 +498,8 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOEx MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); assertNull(embeddingsModel.getSecretSettings()); } @@ -535,8 +536,8 @@ public void 
testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutUrl() t MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().uri()); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); assertNull(embeddingsModel.getSecretSettings()); @@ -556,7 +557,7 @@ public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); assertNull(embeddingsModel.getSecretSettings()); } @@ -574,7 +575,7 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettin MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new 
CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); assertNull(embeddingsModel.getSecretSettings()); } @@ -595,8 +596,8 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings( MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); assertNull(embeddingsModel.getSecretSettings()); } @@ -755,6 +756,135 @@ public void testCheckModelConfig_UpdatesDimensions() throws IOException { } } + public void testCheckModelConfig_UpdatesSimilarityToDotProduct_WhenItIsNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 1, + null, + null + ); + PlainActionFuture listener = new PlainActionFuture<>(); + 
service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result, + // the dimension is set to 2 because there are 2 embeddings returned from the mock server + is( + CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 2, + null, + null, + SimilarityMeasure.DOT_PRODUCT + ) + ) + ); + } + } + + public void testCheckModelConfig_DoesNotUpdateSimilarity_WhenItIsSpecifiedAsCosine() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "id": "de37399c-5df6-47cb-bc57-e3c5680c977b", + "texts": [ + "hello" + ], + "embeddings": { + "float": [ + [ + 0.123, + -0.123 + ] + ] + }, + "meta": { + "api_version": { + "version": "1" + }, + "billed_units": { + "input_tokens": 1 + } + }, + "response_type": "embeddings_by_type" + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 1, + null, + null, + SimilarityMeasure.COSINE + ); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + var result = listener.actionGet(TIMEOUT); + + MatcherAssert.assertThat( + result, + // the dimension is set to 2 because there are 2 embeddings returned from the mock server + is( + CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + CohereEmbeddingsTaskSettings.EMPTY_SETTINGS, + 10, + 2, + null, + null, + SimilarityMeasure.COSINE + ) + ) + ); + } + } + public void testInfer_UnauthorisedResponse() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java index 194b71f9ea32a..b80414adce8c8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -225,4 +225,27 @@ public static CohereEmbeddingsModel createModel( new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable Integer tokenLimit, + @Nullable Integer dimensions, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType, + @Nullable SimilarityMeasure similarityMeasure + ) { + return new CohereEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(url, similarityMeasure, dimensions, tokenLimit, model), + Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) + ), + taskSettings, + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 073712beb8050..80ceb855f9e94 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -29,6 +30,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.util.ArrayList; @@ -235,16 +237,17 @@ public void testParsePersistedConfig() { settings.put( ModelConfigurations.SERVICE_SETTINGS, new HashMap<>( - Map.of(ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, 1, ElasticsearchInternalServiceSettings.NUM_THREADS, 4) + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + ServiceFields.SIMILARITY, + SimilarityMeasure.L2_NORM.toString() + ) ) ); - var e5ServiceSettings = new MultilingualE5SmallInternalServiceSettings( - 1, - 4, - ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID - ); - expectThrows(IllegalArgumentException.class, () -> service.parsePersistedConfig(randomInferenceEntityId, taskType, settings)); } @@ -290,7 +293,9 @@ public void testParsePersistedConfig() { ElasticsearchInternalServiceSettings.NUM_THREADS, 4, InternalServiceSettings.MODEL_ID, - ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ElasticsearchInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID, + ServiceFields.DIMENSIONS, + 1 ) ) ); diff 
--git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index f32fafd493395..6fd56c30516be 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -117,7 +117,13 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity))) ); - assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + assertThat( + thrownException.getMessage(), + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. 
[similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) + ); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index c4c49065cd79c..be18963cc72c1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.http.MockResponse; @@ -512,6 +513,59 @@ public void testCheckModelConfig_IncludesMaxTokens() throws IOException { } } + public void testCheckModelConfig_UsesUserSpecifiedSimilarity() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123 + ] + ] + { + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 2, SimilarityMeasure.COSINE); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat( + result, + is(HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 1, SimilarityMeasure.COSINE)) + ); + } + } + + public void 
testCheckModelConfig_LeavesSimilarityAsNull_WhenUnspecified() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new HuggingFaceService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123 + ] + ] + { + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 2, null); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result, is(HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 1, null))); + } + } + private HuggingFaceService createHuggingFaceService() { return new HuggingFaceService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java index cb37ccfead45d..7fcfd5ee46a90 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.inference.services.huggingface.embeddings; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; @@ -52,4 +54,20 @@ public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static HuggingFaceEmbeddingsModel createModel( + String url, + String apiKey, + int tokenLimit, + int dimensions, + @Nullable SimilarityMeasure similarityMeasure + ) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(createUri(url), similarityMeasure, dimensions, tokenLimit), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 96a5b2d48e4e4..6cfcf974d3250 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -952,6 +952,118 @@ public void testCheckModelConfig_ReturnsModelWithSameDimensions_AndDocProductSet public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensionsField_WhenNotSetByUser() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = 
OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", null, 100, 100, false); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var returnedModel = listener.actionGet(TIMEOUT); + assertThat( + returnedModel, + is( + OpenAiEmbeddingsModelTests.createModel( + getUrl(webServer), + "org", + "secret", + "model", + "user", + SimilarityMeasure.DOT_PRODUCT, + 100, + 2, + false + ) + ) + ); + + assertThat(webServer.requests(), hasSize(1)); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, Matchers.is(Map.of("input", List.of("how big"), "model", "model", "user", "user"))); + } + } + + public void testCheckModelConfig_ReturnsNewModelReference_SetsSimilarityToDocProduct_WhenNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", null, 100, 100, false); + PlainActionFuture listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var returnedModel = listener.actionGet(TIMEOUT); + assertThat( + returnedModel, + is( + OpenAiEmbeddingsModelTests.createModel( + getUrl(webServer), + "org", + "secret", + "model", + "user", + SimilarityMeasure.DOT_PRODUCT, + 100, + 2, + false + ) + ) + ); + + assertThat(webServer.requests(), hasSize(1)); + + var requestMap = 
entityAsMap(webServer.requests().get(0).getBody()); + MatcherAssert.assertThat(requestMap, Matchers.is(Map.of("input", List.of("how big"), "model", "model", "user", "user"))); + } + } + + public void testCheckModelConfig_ReturnsNewModelReference_DoesNotOverrideSimilarity_WhenNotNull() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { String responseJson = """ @@ -1000,7 +1112,7 @@ public void testCheckModelConfig_ReturnsNewModelReference_AndDoesNotSendDimensio "secret", "model", "user", - SimilarityMeasure.DOT_PRODUCT, + SimilarityMeasure.COSINE, 100, 2, false diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java index e37318a0c96d4..0ada6d96195e6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java @@ -303,7 +303,13 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { ) ); - assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + assertThat( + thrownException.getMessage(), + is( + "Validation Failed: 1: [service_settings] Invalid value [by_size] received. 
[similarity] " + + "must be one of [cosine, dot_product, l2_norm];" + ) + ); } public void testToXContent_WritesDimensionsSetByUserTrue() throws IOException { From b40b17601cfc0141eba827e0df9f03b83545d31b Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 27 Mar 2024 15:10:44 +0000 Subject: [PATCH 214/214] [ML] Make OpenAI embeddings parser more flexible (#106808) Fixes a parse failure that was dependent on the order of the fields --- docs/changelog/106808.yaml | 5 + .../external/response/XContentUtils.java | 29 +++- .../CohereEmbeddingsResponseEntity.java | 13 +- .../HuggingFaceElserResponseEntity.java | 6 +- .../OpenAiEmbeddingsResponseEntity.java | 8 +- .../external/response/XContentUtilsTests.java | 132 ++++++++++++++++++ .../OpenAiEmbeddingsResponseEntityTests.java | 59 ++++++++ 7 files changed, 236 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/106808.yaml diff --git a/docs/changelog/106808.yaml b/docs/changelog/106808.yaml new file mode 100644 index 0000000000000..287477fc302fd --- /dev/null +++ b/docs/changelog/106808.yaml @@ -0,0 +1,5 @@ +pr: 106808 +summary: Make OpenAI embeddings parser more flexible +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 3511cbda1841b..42fd0ddc812ec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -37,17 +37,42 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { * @throws IllegalStateException if the field cannot be found */ public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { 
- XContentParser.Token token; + XContentParser.Token token = parser.nextToken(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (token != null && token != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; } + token = parser.nextToken(); } throw new IllegalStateException(format(errorMsgTemplate, field)); } + /** + * Progress the parser consuming and discarding tokens until the + * parser points to the end of the current object. Nested objects + * and arrays are skipped. + * + * If successful the parser's current token is the end object token. + * + * @param parser + * @throws IOException + */ + public static void consumeUntilObjectEnd(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + + // token == null when correctly formed input has + // been fully parsed. + while (token != null && token != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + + token = parser.nextToken(); + } + } + private XContentUtils() {} } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java index bd808c225d7e3..9221e5c5deed8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereEmbeddingsResponseEntity.java @@ -159,18 +159,17 @@ public static InferenceServiceResults fromResponse(Request request, HttpResult r } private static InferenceServiceResults parseEmbeddingsObject(XContentParser 
parser) throws IOException { - XContentParser.Token token; + XContentParser.Token token = parser.nextToken(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while (token != null && token != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { var embeddingValueParser = EMBEDDING_PARSERS.get(parser.currentName()); - if (embeddingValueParser == null) { - continue; + if (embeddingValueParser != null) { + parser.nextToken(); + return embeddingValueParser.apply(parser); } - - parser.nextToken(); - return embeddingValueParser.apply(parser); } + token = parser.nextToken(); } throw new IllegalStateException( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 247537b9958d0..7b7d6c0d06b2b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -83,13 +83,15 @@ private static SparseEmbeddingResults.Embedding parseExpansionResult(boolean[] t XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser); List weightedTokens = new ArrayList<>(); - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + token = parser.nextToken(); + while (token != null && token != XContentParser.Token.END_OBJECT) { XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); var floatToken = parser.nextToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, floatToken, parser); weightedTokens.add(new 
SparseEmbeddingResults.WeightedToken(parser.currentName(), parser.floatValue())); + + token = parser.nextToken(); } // prevent an out of bounds if for some reason the truncation list is smaller than the results diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index 4926ba3f0ef6b..4bfdec9a3669b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.List; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.consumeUntilObjectEnd; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; @@ -95,11 +96,8 @@ private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParse positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingValues = XContentParserUtils.parseList(parser, OpenAiEmbeddingsResponseEntity::parseEmbeddingList); - - // the parser is currently sitting at an ARRAY_END so go to the next token - parser.nextToken(); - // if there are additional fields within this object, lets skip them, so we can begin parsing the next embedding array - parser.skipChildren(); + // parse and discard the rest of the object + consumeUntilObjectEnd(parser); return new TextEmbeddingResults.Embedding(embeddingValues); } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index c8de0371ab196..4f7cd9ea89a14 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -8,12 +8,15 @@ package org.elasticsearch.xpack.inference.external.response; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentEOFException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.Locale; +import static org.hamcrest.Matchers.containsString; + public class XContentUtilsTests extends ESTestCase { public void testMoveToFirstToken() throws IOException { @@ -83,4 +86,133 @@ public void testPositionParserAtTokenAfterField_ThrowsIfFieldIsMissing() throws assertEquals(String.format(Locale.ROOT, errorFormat, missingField), exception.getMessage()); } } + + public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws IOException { + var json = """ + { + "key": "value", + "foo": "bar" + """; + var errorFormat = "Error: %s"; + var missingField = "missing field"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + var exception = expectThrows( + XContentEOFException.class, + () -> XContentUtils.positionParserAtTokenAfterField(parser, missingField, errorFormat) + ); + + assertThat(exception.getMessage(), containsString("Unexpected end-of-input")); + } + } + + public void testConsumeUntilObjectEnd() throws IOException { + var json = """ + { + "key": "value", + "foo": true, + "bar": 0.1 + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + 
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + parser.nextToken(); + parser.nextToken(); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); // fully parsed + } + } + + public void testConsumeUntilObjectEnd_SkipArray() throws IOException { + var json = """ + { + "key": "value", + "skip_array": [1.0, 2.0, 3.0] + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + } + + public void testConsumeUntilObjectEnd_SkipNestedObject() throws IOException { + var json = """ + { + "key": "value", + "skip_obj": { + "foo": "bar" + } + } + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); // fully parsed + } + } + + public void testConsumeUntilObjectEnd_InArray() throws IOException { + var json = """ + [ + { + "key": "value", + "skip_obj": { + "foo": "bar" + } + }, + { + "key": "value", + "skip_array": [1.0, 2.0, 3.0] + }, + { + "key": "value", + "skip_field1": "f1", + "skip_field2": "f2" + } + ] + """; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + 
assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + + // Parser now inside object 1 + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("key", parser.currentName()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + // Start of object 2 + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + // Start of object 3 + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("skip_field1", parser.currentName()); + XContentUtils.consumeUntilObjectEnd(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + + assertEquals(XContentParser.Token.END_ARRAY, parser.nextToken()); + assertNull(parser.nextToken()); // fully parsed + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 010e990a3ce80..4583ba9d21b6d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -327,4 +327,63 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { is("Failed to parse object: expecting token of type 
[VALUE_NUMBER] but found [START_OBJECT]") ); } + + public void testFieldsInDifferentOrderServer() throws IOException { + // The fields of the objects in the data array are reordered + String response = """ + { + "created": 1711530064, + "object": "list", + "id": "6667830b-716b-4796-9a61-33b67b5cc81d", + "model": "mxbai-embed-large-v1", + "data": [ + { + "embedding": [ + -0.9, + 0.5, + 0.3 + ], + "index": 0, + "object": "embedding" + }, + { + "index": 0, + "embedding": [ + 0.1, + 0.5 + ], + "object": "embedding" + }, + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.5, + 0.5 + ] + } + ], + "usage": { + "prompt_tokens": 0, + "completion_tokens": 0, + "total_tokens": 0 + } + }"""; + + TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), response.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(-0.9F, 0.5F, 0.3F)), + new TextEmbeddingResults.Embedding(List.of(0.1F, 0.5F)), + new TextEmbeddingResults.Embedding(List.of(0.5F, 0.5F)) + ) + ) + ); + } }